[ 485.011695] env[62503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 485.012091] env[62503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 485.012192] env[62503]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62503) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 485.012451] env[62503]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 485.107718] env[62503]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62503) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}} [ 485.118221] env[62503]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62503) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}} [ 485.725750] env[62503]: INFO nova.virt.driver [None req-c4b6ba0c-2362-49b4-bf1a-b5139ca02534 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 485.795568] env[62503]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 485.795725] env[62503]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 485.795824] env[62503]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62503) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 488.950357] env[62503]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-e991f41d-03da-44eb-a955-d02d1334f62e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.966979] env[62503]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62503) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 488.967164] env[62503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-69fc4d7c-af53-440a-a1b5-f7a875c90f37 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.998418] env[62503]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d0d2f. 
[ 488.998563] env[62503]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.203s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 488.999111] env[62503]: INFO nova.virt.vmwareapi.driver [None req-c4b6ba0c-2362-49b4-bf1a-b5139ca02534 None None] VMware vCenter version: 7.0.3 [ 489.002413] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b84379-8f3f-4d0c-bc41-d0a23a2f5ac5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.019055] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f464b2-1000-40b0-8bfa-f60f4004b658 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.024671] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a6274c-c6bb-488c-9ca0-467ade410bf6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.031050] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884d8d3b-b3d8-4457-af81-2d0222205df2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.043697] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2078a80d-e798-4365-a11b-62e7fedf7385 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.049314] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66347c1f-3141-4331-a13d-af891b857196 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.079305] env[62503]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-60f6856c-616a-4ebe-a3d0-13919ed3f4ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.084137] env[62503]: DEBUG nova.virt.vmwareapi.driver [None req-c4b6ba0c-2362-49b4-bf1a-b5139ca02534 None None] Extension org.openstack.compute already exists. {{(pid=62503) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}} [ 489.086802] env[62503]: INFO nova.compute.provider_config [None req-c4b6ba0c-2362-49b4-bf1a-b5139ca02534 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 489.590021] env[62503]: DEBUG nova.context [None req-c4b6ba0c-2362-49b4-bf1a-b5139ca02534 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),9fc8901d-68b4-444c-a167-e26c3e128a0c(cell1) {{(pid=62503) load_cells /opt/stack/nova/nova/context.py:464}} [ 489.592135] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 489.592376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 489.593047] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 489.593498] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Acquiring lock "9fc8901d-68b4-444c-a167-e26c3e128a0c" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 489.593696] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Lock "9fc8901d-68b4-444c-a167-e26c3e128a0c" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 489.594703] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Lock "9fc8901d-68b4-444c-a167-e26c3e128a0c" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 489.614842] env[62503]: INFO dbcounter [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Registered counter for database nova_cell0 [ 489.623190] env[62503]: INFO dbcounter [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Registered counter for database nova_cell1 [ 489.629019] env[62503]: DEBUG oslo_db.sqlalchemy.engines [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62503) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 489.629019] env[62503]: DEBUG oslo_db.sqlalchemy.engines [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62503) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 489.631748] env[62503]: ERROR nova.db.main.api [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 489.631748] env[62503]: result = function(*args, **kwargs) [ 489.631748] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 489.631748] env[62503]: return func(*args, **kwargs) [ 489.631748] env[62503]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 489.631748] env[62503]: result = fn(*args, **kwargs) [ 489.631748] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 489.631748] env[62503]: return f(*args, **kwargs) [ 489.631748] env[62503]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 489.631748] env[62503]: return db.service_get_minimum_version(context, binaries) [ 489.631748] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 489.631748] env[62503]: _check_db_access() [ 489.631748] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 489.631748] env[62503]: stacktrace = ''.join(traceback.format_stack()) [ 489.631748] env[62503]: [ 489.632461] env[62503]: ERROR nova.db.main.api [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 489.632461] env[62503]: result = function(*args, **kwargs) [ 489.632461] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 489.632461] env[62503]: return func(*args, **kwargs) [ 489.632461] env[62503]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 489.632461] env[62503]: result = fn(*args, **kwargs) [ 489.632461] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 489.632461] env[62503]: return f(*args, **kwargs) [ 489.632461] env[62503]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 489.632461] env[62503]: return db.service_get_minimum_version(context, binaries) [ 489.632461] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 489.632461] env[62503]: _check_db_access() [ 489.632461] env[62503]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 489.632461] env[62503]: stacktrace = ''.join(traceback.format_stack()) [ 489.632461] env[62503]: [ 489.633156] env[62503]: WARNING nova.objects.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Failed to get minimum service version for cell 9fc8901d-68b4-444c-a167-e26c3e128a0c [ 489.633156] env[62503]: WARNING nova.objects.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 489.633841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Acquiring lock "singleton_lock" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 489.633841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Acquired lock "singleton_lock" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
489.633841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Releasing lock "singleton_lock" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 489.634177] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Full set of CONF: {{(pid=62503) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 489.634322] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ******************************************************************************** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 489.634451] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Configuration options gathered from: {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 489.634589] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 489.634783] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 489.634913] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ================================================================================ {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 489.635139] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] allow_resize_to_same_host = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.635314] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] arq_binding_timeout = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.635485] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] backdoor_port = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.635627] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] backdoor_socket = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.635796] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] block_device_allocate_retries = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.635958] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] block_device_allocate_retries_interval = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.636145] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cert = self.pem {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.636317] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.636536] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute_monitors = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.636722] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] config_dir = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.636900] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] config_drive_format = iso9660 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637056] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637231] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] config_source = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637406] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] console_host = devstack {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637577] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] control_exchange = nova {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637758] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cpu_allocation_ratio = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.637937] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] daemon = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.638130] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] debug = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.638296] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_access_ip_network_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.638466] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_availability_zone = nova {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.638628] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_ephemeral_format = 
None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.638787] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_green_pool_size = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639035] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639209] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] default_schedule_zone = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639373] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] disk_allocation_ratio = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639540] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] enable_new_services = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639723] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] enabled_apis = ['osapi_compute'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.639912] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] enabled_ssl_apis = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640091] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] flat_injected = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640255] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] force_config_drive = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640419] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] force_raw_images = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640591] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] graceful_shutdown_timeout = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640754] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] heal_instance_info_cache_interval = 60 {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.640974] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] host = cpu-1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.641169] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.641337] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] initial_disk_allocation_ratio = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.641503] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] initial_ram_allocation_ratio = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.641718] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.641886] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_build_timeout = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642061] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_delete_interval = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642239] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_format = [instance: %(uuid)s] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642407] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_name_template = instance-%08x {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642607] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_usage_audit = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642786] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_usage_audit_period = month {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.642955] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643140] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] instances_path = /opt/stack/data/nova/instances {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] internal_service_availability_zone = internal {{(pid=62503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643472] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] key = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643636] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] live_migration_retry_count = 30 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643807] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_color = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.643971] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_config_append = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644155] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644319] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_dir = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644483] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644617] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_options = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644780] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_rotate_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.644955] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_rotate_interval_type = days {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645137] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] log_rotation_type = none {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645272] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645427] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645613] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645785] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.645917] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646095] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] long_rpc_timeout = 1800 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646264] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_concurrent_builds = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646462] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_concurrent_live_migrations = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646637] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_concurrent_snapshots = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646804] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_local_block_devices = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.646966] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_logfile_count = 30 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.647141] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] max_logfile_size_mb = 200 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.647304] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] maximum_instance_delete_attempts = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.647476] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metadata_listen = 0.0.0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.647648] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metadata_listen_port = 8775 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.647853] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metadata_workers = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648033] env[62503]: DEBUG oslo_service.service 
[None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] migrate_max_retries = -1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648210] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] mkisofs_cmd = genisoimage {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648421] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] my_block_storage_ip = 10.180.1.21 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648558] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] my_ip = 10.180.1.21 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648767] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.648936] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] network_allocate_retries = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649297] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649297] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] osapi_compute_listen = 0.0.0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649461] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] osapi_compute_listen_port = 8774 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649625] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] osapi_compute_unique_server_name_scope = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649797] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] osapi_compute_workers = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.649961] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] password_length = 12 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650135] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] periodic_enable = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650295] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] periodic_fuzzy_delay = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650467] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] pointer_model = usbtablet 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650635] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] preallocate_images = none {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650796] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] publish_errors = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.650926] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] pybasedir = /opt/stack/nova {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651096] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ram_allocation_ratio = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651263] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rate_limit_burst = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651431] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rate_limit_except_level = CRITICAL {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651592] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rate_limit_interval = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651757] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reboot_timeout = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.651940] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reclaim_instance_interval = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652115] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] record = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652289] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reimage_timeout_per_gb = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652457] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] report_interval = 120 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652620] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rescue_timeout = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652781] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reserved_host_cpus = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.652940] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reserved_host_disk_mb = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653113] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reserved_host_memory_mb = 512 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653279] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] reserved_huge_pages = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] resize_confirm_window = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] resize_fs_using_block_device = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653762] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] resume_guests_state_on_host_boot = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.653931] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654106] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] rpc_response_timeout = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654272] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] run_external_periodic_tasks = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654443] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] running_deleted_instance_action = reap {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654639] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] running_deleted_instance_poll_interval = 1800 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654805] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] running_deleted_instance_timeout = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.654966] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler_instance_sync_interval = 120 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.655148] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_down_time = 720 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.655318] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] 
servicegroup_driver = db {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.655510] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] shell_completion = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.655679] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] shelved_offload_time = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.655841] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] shelved_poll_interval = 3600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656016] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] shutdown_timeout = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656186] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] source_is_ipv6 = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656348] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ssl_only = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656625] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656798] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] sync_power_state_interval = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.656963] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] sync_power_state_pool_size = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657149] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] syslog_log_facility = LOG_USER {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] tempdir = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657476] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] timeout_nbd = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657649] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] transport_url = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657812] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] update_resources_interval = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.657973] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_cow_images = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.658147] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_eventlog = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.658309] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_journal = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.658507] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_json = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.658693] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_rootwrap_daemon = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.658858] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_stderr = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659033] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] use_syslog = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659200] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vcpu_pin_set = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659369] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plugging_is_fatal = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659537] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plugging_timeout = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659705] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] virt_mkfs = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.659870] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] volume_usage_poll_interval = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.660042] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] watch_log_file = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.660218] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] web = /usr/share/spice-html5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 489.660404] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.660577] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.660744] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.660916] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_concurrency.disable_process_locking = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.661227] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.661413] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.661586] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.661759] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.661930] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.662118] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.662309] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.auth_strategy = keystone {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.662480] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.compute_link_prefix = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.662658] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.662834] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.dhcp_domain = novalocal {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
489.663014] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.enable_instance_password = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.663199] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.glance_link_prefix = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.663374] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.663548] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.663713] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.instance_list_per_project_cells = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.663877] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.list_records_by_skipping_down_cells = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664055] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.local_metadata_per_cell = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664231] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.max_limit = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664404] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.metadata_cache_expiration = 15 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664584] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.neutron_default_tenant_id = default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664759] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.response_validation = warn {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.664929] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.use_neutron_default_nets = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.665133] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.665304] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.665501] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.665685] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.665856] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_dynamic_targets = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666031] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_jsonfile_path = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666223] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666422] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.backend = dogpile.cache.memcached {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666618] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.backend_argument = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666800] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.config_prefix = cache.oslo {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.666973] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.dead_timeout = 60.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.667157] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.debug_cache_backend = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.667324] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.enable_retry_client = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.667492] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.enable_socket_keepalive = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.667668] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.enabled = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.667834] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.enforce_fips_mode = False {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668008] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.expiration_time = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668183] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.hashclient_retry_attempts = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668355] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.hashclient_retry_delay = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668523] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_dead_retry = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668685] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_password = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.668852] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669022] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669196] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_pool_maxsize = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669364] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669533] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_sasl_enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669718] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.669888] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_socket_timeout = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670066] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.memcache_username = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670241] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.proxies = [] {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670409] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_db = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670573] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_password = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670748] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_sentinel_service_name = mymaster {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.670927] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671114] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_server = localhost:6379 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671288] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_socket_timeout = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671450] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.redis_username = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671617] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.retry_attempts = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671786] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.retry_delay = 0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.671949] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.socket_keepalive_count = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672126] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.socket_keepalive_idle = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672291] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.socket_keepalive_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672454] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.tls_allowed_ciphers = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672618] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.tls_cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672779] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.tls_certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.672943] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.tls_enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673118] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cache.tls_keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673296] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673474] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.auth_type = password {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673641] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673820] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.catalog_info = volumev3::publicURL {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.673984] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674166] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674335] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.cross_az_attach = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674499] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.debug = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674666] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.endpoint_template = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674833] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.http_retries = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.674999] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.675176] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.keyfile = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.675372] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.os_region_name = RegionOne {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.675550] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.675718] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cinder.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.675896] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676074] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.cpu_dedicated_set = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676243] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.cpu_shared_set = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676415] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.image_type_exclude_list = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676613] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676788] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.max_concurrent_disk_ops = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.676954] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.max_disk_devices_to_attach = -1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677136] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677308] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677475] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.resource_provider_association_refresh = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677640] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677804] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.shutdown_retry_interval = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.677987] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.678186] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] conductor.workers = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.678377] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] console.allowed_origins = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.678552] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] console.ssl_ciphers = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.678726] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] console.ssl_minimum_version = default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.678897] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] consoleauth.enforce_session_timeout = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679081] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] consoleauth.token_ttl = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679255] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679417] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679584] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679744] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.679903] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680077] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.endpoint_override = None 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680245] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680405] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680567] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680726] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.680887] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681061] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681227] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681399] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.service_type = accelerator {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681597] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681779] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.681940] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.682117] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.682304] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.682468] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] cyborg.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
489.682651] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.backend = sqlalchemy {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.682824] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.connection = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.682991] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.connection_debug = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.683179] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.connection_parameters = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.683347] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.connection_recycle_time = 3600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.683511] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.connection_trace = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.683729] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.db_inc_retry_interval = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.683928] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.db_max_retries = 20 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684112] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.db_max_retry_interval = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684283] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.db_retry_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684449] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.max_overflow = 50 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684616] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.max_pool_size = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684779] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.max_retries = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.684950] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685125] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.mysql_wsrep_sync_wait = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685288] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.pool_timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685488] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.retry_interval = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685657] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.slave_connection = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685822] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.sqlite_synchronous = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.685983] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] database.use_db_reconnect = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.686181] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.backend = sqlalchemy {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.686367] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.connection = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.686583] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.connection_debug = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.686770] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.connection_parameters = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687026] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.connection_recycle_time = 3600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687113] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.connection_trace = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687280] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.db_inc_retry_interval = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687444] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.db_max_retries = 20 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687607] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.db_max_retry_interval = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687770] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.db_retry_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.687928] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.max_overflow = 50 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688101] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.max_pool_size = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688266] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.max_retries = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688438] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688598] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688755] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.pool_timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.688915] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.retry_interval = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689084] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.slave_connection = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689250] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] api_database.sqlite_synchronous = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689427] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] devices.enabled_mdev_types = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689608] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689780] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ephemeral_storage_encryption.default_format = luks {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.689942] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ephemeral_storage_encryption.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690121] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690298] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.api_servers = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690465] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690628] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690793] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.690953] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691125] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691289] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.debug = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691458] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.default_trusted_certificate_ids = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691625] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.enable_certificate_validation = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691789] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.enable_rbd_download = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.691949] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.692130] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.692297] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.keyfile = None 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.692467] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.692667] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.692842] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.num_retries = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693027] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.rbd_ceph_conf = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693202] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.rbd_connect_timeout = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693391] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.rbd_pool = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693596] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.rbd_user = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693766] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.693930] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694107] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694284] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.service_type = image {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694465] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694624] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694784] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.694944] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.695141] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.695313] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.verify_glance_signatures = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.695505] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] glance.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.695685] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] guestfs.debug = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.695857] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696033] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696201] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696370] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696574] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696748] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.696911] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697084] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697253] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697414] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.keyfile = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697579] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697740] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.697897] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698071] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698235] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698406] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.service_type = shared-file-system {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698573] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.share_apply_policy_timeout = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698736] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.698895] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.699063] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.699229] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.699410] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.699575] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] manila.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.699746] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] mks.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700108] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700303] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.manager_interval = 2400 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700477] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.precache_concurrency = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700652] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.remove_unused_base_images = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700822] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.700992] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.701186] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] image_cache.subdirectory_name = _base {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.701366] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.api_max_retries = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.701535] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.api_retry_interval = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.701695] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.701858] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702029] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702196] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702364] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702531] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.conductor_group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702693] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.702852] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703016] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703187] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703351] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703543] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703710] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.703878] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.peer_list = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704051] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704217] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704383] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.serial_console_state_timeout = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704568] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704763] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.service_type = baremetal {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.704927] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.shard = None {{(pid=62503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.705108] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.705274] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.705473] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.705657] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.705847] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706023] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ironic.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706208] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706386] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] key_manager.fixed_key = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706571] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706735] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.barbican_api_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.706897] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.barbican_endpoint = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707083] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.barbican_endpoint_type = public {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707250] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.barbican_region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707412] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.cafile = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707574] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707740] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.707902] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708072] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708240] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.number_of_retries = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708404] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.retry_delay = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708571] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.send_service_user_token = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708733] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.708893] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709066] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.verify_ssl = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709229] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican.verify_ssl_path = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709401] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709567] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709728] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.709887] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710066] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710234] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710395] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710561] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710722] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] barbican_service_user.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.710890] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.approle_role_id = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711062] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.approle_secret_id = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711239] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.kv_mountpoint = secret {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711402] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.kv_path = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711568] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.kv_version = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711731] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.namespace = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.711892] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.root_token_id = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712064] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.ssl_ca_crt_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712238] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.timeout = 60.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712407] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.use_ssl = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712580] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712751] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.712917] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713092] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713257] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713450] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713629] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713791] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.713954] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714138] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714302] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714464] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714626] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.min_version = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714785] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.714944] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715117] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715294] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.service_type = identity {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715495] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715669] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715832] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.715993] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.716195] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.716365] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] keystone.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.716630] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.connection_uri = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.716803] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_mode = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.716975] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_model_extra_flags = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.717164] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_models = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
489.717341] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_power_governor_high = performance {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.717540] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_power_governor_low = powersave {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.717725] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_power_management = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.717903] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718081] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.device_detach_attempts = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718254] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.device_detach_timeout = 20 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718424] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.disk_cachemodes = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718590] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.disk_prefix = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718763] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.enabled_perf_events = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.718932] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.file_backed_memory = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719119] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.gid_maps = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719286] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.hw_disk_discard = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719453] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.hw_machine_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719631] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_rbd_ceph_conf = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719796] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.719961] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720146] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_rbd_glance_store_name = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720319] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_rbd_pool = rbd {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720493] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_type = default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720657] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.images_volume_group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720823] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.inject_key = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.720987] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.inject_partition = -2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721165] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.inject_password = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721332] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.iscsi_iface = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721496] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.iser_use_multipath = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721663] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_bandwidth = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721827] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.721991] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_downtime = 500 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722173] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722339] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722503] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_inbound_addr = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722670] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722834] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_permit_post_copy = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.722994] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_scheme = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.723184] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_timeout_action = abort {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.723357] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_tunnelled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.723549] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_uri = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.723778] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.live_migration_with_native_tls = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.723956] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.max_queues = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.724142] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.724387] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.724559] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.nfs_mount_options = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.724849] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725034] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725207] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_iser_scan_tries = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725397] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_memory_encrypted_guests = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725579] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725751] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_pcie_ports = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.725923] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.num_volume_scan_tries = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.726108] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.pmem_namespaces = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.726274] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.quobyte_client_cfg = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.726595] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.726776] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rbd_connect_timeout = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.726948] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.727130] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.727296] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rbd_secret_uuid = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
489.727461] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rbd_user = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.727628] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.727805] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.remote_filesystem_transport = ssh {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.727967] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rescue_image_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.728145] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rescue_kernel_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.728305] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rescue_ramdisk_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.728505] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.728691] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.rx_queue_size = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.728868] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.smbfs_mount_options = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.729163] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.729343] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.snapshot_compression = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.729535] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.snapshot_image_format = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.729782] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.729959] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.sparse_logical_volumes = False {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730143] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.swtpm_enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730319] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.swtpm_group = tss {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730492] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.swtpm_user = tss {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730668] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.sysinfo_serial = unique {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730834] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.tb_cache_size = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.730991] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.tx_queue_size = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.731171] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.uid_maps = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.731335] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.use_virtio_for_bridges = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.731512] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.virt_type = kvm {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.731691] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.volume_clear = zero {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.731863] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.volume_clear_size = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732039] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.volume_use_multipath = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732206] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_cache_path = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732385] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732562] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_mount_group = qemu {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732733] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_mount_opts = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.732903] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.733198] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.733398] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.vzstorage_mount_user = stack {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.733583] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.733760] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.733938] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.auth_type = password {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.734118] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.734285] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.default_floating_pool = public {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735328] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.extension_sync_interval = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735549] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.http_retries = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735581] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735802] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.735986] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.736187] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.736350] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.736525] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.ovs_bridge = br-int {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.physnets = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.region_name = RegionOne {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737311] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.service_metadata_proxy = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737467] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737496] env[62503]: 
DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.service_type = network {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737663] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737823] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.737987] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.738162] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] neutron.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] notifications.bdms_in_notifications = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] notifications.default_level = INFO {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] notifications.notification_format = unversioned {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] notifications.notify_on_state_change = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742015] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] pci.alias = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] pci.device_spec = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG 
oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] pci.report_in_placement = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.auth_type = password {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.default_domain_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.default_domain_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742429] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.domain_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.domain_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None 
None] placement.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742602] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742726] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742910] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.password = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.742969] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.project_domain_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743146] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.project_domain_name = Default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743313] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.project_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743498] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.project_name = service {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743668] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.region_name = RegionOne {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743833] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.743993] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.744173] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.service_type = placement {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.744336] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.status_code_retries = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.system_scope = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.trust_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.user_domain_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745672] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.user_domain_name = Default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745672] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.user_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.745857] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.username = nova {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746068] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746243] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] placement.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746427] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.cores = 20 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746600] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.count_usage_from_placement = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746774] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.746952] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.injected_file_content_bytes = 10240 {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747139] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.injected_file_path_length = 255 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747315] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.injected_files = 5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747486] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.instances = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747658] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.key_pairs = 100 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747826] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.metadata_items = 128 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.747994] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.ram = 51200 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.748175] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.recheck_quota = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.748346] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.server_group_members = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.748513] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] quota.server_groups = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.748689] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.748857] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749029] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.image_metadata_prefilter = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749198] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749365] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.max_attempts = 3 {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749530] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.max_placement_results = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749694] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.749858] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.query_placement_for_image_type_support = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750030] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750212] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] scheduler.workers = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750387] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750562] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750746] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.750921] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751104] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751275] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751636] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751807] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.host_subset_size = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.751972] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.752149] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.752314] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.752480] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.isolated_hosts = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.752680] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.isolated_images = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.752855] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753027] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753198] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753369] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.pci_in_placement = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753571] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753748] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.753915] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754103] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754438] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754610] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.track_instance_changes = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754790] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.754963] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metrics.required = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.755147] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metrics.weight_multiplier = 1.0 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.755314] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.755508] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] metrics.weight_setting = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.755828] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756012] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756203] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.port_range = 10000:20000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756393] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756583] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756754] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] serial_console.serialproxy_port = 6083 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.756926] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757114] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.auth_type = password {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757279] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757439] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757603] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757764] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.insecure = False {{(pid=62503) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.757921] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.758101] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.send_service_user_token = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.758269] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.758427] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] service_user.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.758599] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.agent_enabled = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.758763] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759081] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759278] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759449] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.html5proxy_port = 6082 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759616] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.image_compression = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759777] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.jpeg_compression = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.759935] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.playback_compression = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760115] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.require_secure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760289] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.server_listen = 127.0.0.1 {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760459] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760620] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.streaming_mode = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760780] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] spice.zlib_compression = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.760947] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] upgrade_levels.baseapi = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761133] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] upgrade_levels.compute = auto {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761297] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] upgrade_levels.conductor = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761456] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] upgrade_levels.scheduler = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761626] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.auth_section = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761790] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.761950] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762124] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762290] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762453] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762615] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.keyfile = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762779] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.762938] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vendordata_dynamic_auth.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.763125] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.api_retry_count = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.763290] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.ca_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.763497] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.cache_prefix = devstack-image-cache {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.763681] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.cluster_name = testcl1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.763849] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.connection_pool_size = 10 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764020] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.console_delay_seconds = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764200] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.datastore_regex = ^datastore.* {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764412] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764601] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.host_password = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764806] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.host_port = 443 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.764984] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.host_username = administrator@vsphere.local {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.765173] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.insecure = True {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.765341] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.integration_bridge = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.765556] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.maximum_objects = 100 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.765730] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.pbm_default_policy = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.765897] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.pbm_enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766071] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.pbm_wsdl_location = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766251] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766418] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.serial_port_proxy_uri = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766582] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.serial_port_service_uri = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766751] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.task_poll_interval = 0.5 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.766925] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.use_linked_clone = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.767110] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.vnc_keymap = en-us {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.767282] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.vnc_port = 5900 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.767448] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vmware.vnc_port_total = 10000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.767642] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.auth_schemes = ['none'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.767819] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.768127] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.768320] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.768494] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.novncproxy_port = 6080 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.768679] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.server_listen = 127.0.0.1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.768856] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769030] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.vencrypt_ca_certs = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769200] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.vencrypt_client_cert = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769368] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vnc.vencrypt_client_key = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769548] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769713] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_deep_image_inspection = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.769873] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770046] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770212] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770376] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.disable_rootwrap = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770540] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.enable_numa_live_migration = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770701] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.770863] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771033] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771200] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.libvirt_disable_apic = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771364] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771529] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771691] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.771852] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772019] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772188] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772350] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772510] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772670] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.772837] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773034] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773213] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.client_socket_timeout = 900 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773400] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.default_pool_size = 1000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773589] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.keep_alive = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773763] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.max_header_line = 16384 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.773930] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.secure_proxy_ssl_header = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774109] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.ssl_ca_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774277] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.ssl_cert_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774443] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.ssl_key_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774611] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.tcp_keepidle = 600 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774792] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.774964] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] zvm.ca_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.775141] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] zvm.cloud_connector_url = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.775461] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.775647] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] zvm.reachable_timeout = 300 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.775834] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.enforce_new_defaults = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.776311] env[62503]: WARNING oslo_config.cfg [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
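The option dump above, including the WARNING about the deprecated [oslo_policy] enforce_scope option, is produced by oslo.config's log_opt_values() call at service startup (the cfg.py:2826 frame repeated on every record). A minimal sketch of that mechanism, assuming only the oslo.config library; the group and option names mirror the log, while the logger setup and defaults are illustrative:

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    conf = cfg.ConfigOpts()
    conf.register_opts(
        [
            cfg.BoolOpt('enforce_new_defaults', default=True),
            # Marked deprecated-for-removal; oslo.config emits a WARNING like
            # the one above when such an option is set in the loaded config.
            cfg.BoolOpt('enforce_scope', default=True,
                        deprecated_for_removal=True,
                        deprecated_reason='Scope checks will always be enforced.'),
        ],
        group='oslo_policy')

    conf([], project='nova')                   # parse (empty) CLI and config files
    LOG.debug(conf.oslo_policy.enforce_scope)  # value read as group.option attribute
    conf.log_opt_values(LOG, logging.DEBUG)    # emits "group.option = value" lines

In the running service the same call walks every registered group, which is why the dump runs from placement.* all the way down to the os_vif and privsep groups below.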
[ 489.776618] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.enforce_scope = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.776921] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.policy_default_rule = default {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.777246] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.777488] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.policy_file = policy.yaml {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.777699] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.777872] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778051] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778221] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778394] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778579] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778759] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.778939] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.connection_string = messaging:// {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779125] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.enabled = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779299] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.es_doc_type = notification 
{{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779466] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.es_scroll_size = 10000 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779636] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.es_scroll_time = 2m {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779801] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.filter_error_trace = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.779971] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.hmac_keys = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.sentinel_service_name = mymaster {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.socket_timeout = 0.1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.trace_requests = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler.trace_sqlalchemy = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler_jaeger.process_tags = {} {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler_jaeger.service_name_prefix = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782442] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] profiler_otlp.service_name_prefix = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] remote_debug.host = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] remote_debug.port = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782719] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782895] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782895] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782895] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.782895] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783114] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783183] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783364] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783559] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783724] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.783898] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784081] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784249] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784426] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784601] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784760] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.784925] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.785103] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.785271] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.785461] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.785638] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.785857] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786058] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786229] env[62503]: DEBUG oslo_service.service [None 
req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786412] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786584] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786761] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.786934] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787115] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787289] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787461] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.ssl_version = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787627] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787815] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.787983] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_notifications.retry = -1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.788186] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.788363] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_messaging_notifications.transport_url = **** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.788536] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.auth_section = None {{(pid=62503) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.788700] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.auth_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.788859] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.cafile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789024] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.certfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789192] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.collect_timing = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789350] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.connect_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789527] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.connect_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789713] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.endpoint_id = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.789875] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.endpoint_override = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790046] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.insecure = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790209] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.keyfile = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790367] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.max_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790523] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.min_version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790680] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.region_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790840] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.retriable_status_codes = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.790996] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.service_name = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791167] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.service_type = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791327] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.split_loggers = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791483] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.status_code_retries = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791644] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.status_code_retry_delay = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791797] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.timeout = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.791953] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.valid_interfaces = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792122] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_limit.version = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792288] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_reports.file_event_handler = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792452] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792611] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] oslo_reports.log_dir = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792783] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.792943] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793114] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793281] 
env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793470] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793642] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793816] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.793978] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794153] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794321] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794485] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794645] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] vif_plug_ovs_privileged.user = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794817] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.flat_interface = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.794999] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.795190] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.795384] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.795570] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.795742] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.795910] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796088] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796273] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796462] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.isolate_vif = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796644] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796815] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.796988] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.797186] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.ovsdb_interface = native {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.797338] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] os_vif_ovs.per_port_bridge = False {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.797512] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] privsep_osbrick.capabilities = [21] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.797676] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] privsep_osbrick.group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.797835] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] privsep_osbrick.helper_command = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798013] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798182] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798340] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] privsep_osbrick.user = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798543] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798715] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.group = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.798877] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.helper_command = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.799055] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.799223] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.799382] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] nova_sys_admin.user = None {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 489.799516] env[62503]: DEBUG oslo_service.service [None req-5eb706b4-0829-44f4-af24-7b391f0c1fcc None None] ******************************************************************************** {{(pid=62503) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 489.800016] env[62503]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 490.303296] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Getting list of instances from cluster (obj){ [ 490.303296] env[62503]: value = "domain-c8" [ 490.303296] env[62503]: _type = "ClusterComputeResource" [ 490.303296] env[62503]: } {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 490.304477] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da4077d-bb8f-41aa-a410-58d450b4e579 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 490.313261] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Got total of 0 instances {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 490.313783] env[62503]: WARNING nova.virt.vmwareapi.driver [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 490.314254] env[62503]: INFO nova.virt.node [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Generated node identity 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 [ 490.314482] env[62503]: INFO nova.virt.node [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Wrote node identity 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 to /opt/stack/data/n-cpu-1/compute_id [ 490.818685] env[62503]: WARNING nova.compute.manager [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Compute nodes ['1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 491.824671] env[62503]: INFO nova.compute.manager [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 492.829760] env[62503]: WARNING nova.compute.manager [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 492.830103] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 492.830252] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 492.830375] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 492.830532] env[62503]: DEBUG nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 492.831474] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d16795-2f2a-4af4-b994-ba7d359a537c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.839890] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1af6c7-585b-416b-8101-29cfdb81d790 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.854014] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-344c88ff-3ec6-4c13-8c5c-0ac6d0f83700 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.860947] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c949373-3257-4509-b0fd-710c1e50c1f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.890379] env[62503]: DEBUG nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 492.890510] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 492.890796] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 493.393182] env[62503]: WARNING nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] No compute node record for cpu-1:1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 could not be found. [ 493.897467] env[62503]: INFO nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 [ 495.405588] env[62503]: DEBUG nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 495.405932] env[62503]: DEBUG nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 495.667248] env[62503]: INFO nova.scheduler.client.report [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] [req-60a7f296-c41d-48cb-8545-6e0c4b4d3dad] Created resource provider record via placement API for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
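The inventory that the resource tracker pushes to Placement in the entries that follow is derived from the hypervisor view logged above (48 vCPUs, 196590 MB RAM with 512 MB reserved, 400 GB of datastore capacity, allocation ratios 4.0/1.0/1.0). The snippet below is a minimal illustrative sketch of how such an inventory dict can be assembled; the helper name, hard-coded ratios, and max_unit values are assumptions mirroring the numbers visible in this log, not Nova's actual implementation.

    # Illustrative sketch (not Nova source): build the Placement inventory dict
    # that appears in the provider_tree / scheduler report entries below.
    def build_inventory(total_vcpus, total_ram_mb, total_disk_gb,
                        max_unit_vcpu=16, max_unit_ram_mb=65530, max_unit_disk_gb=175,
                        cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                        reserved_ram_mb=512):
        return {
            'VCPU': {'total': total_vcpus, 'reserved': 0, 'min_unit': 1,
                     'max_unit': max_unit_vcpu, 'step_size': 1,
                     'allocation_ratio': cpu_ratio},
            'MEMORY_MB': {'total': total_ram_mb, 'reserved': reserved_ram_mb,
                          'min_unit': 1, 'max_unit': max_unit_ram_mb,
                          'step_size': 1, 'allocation_ratio': ram_ratio},
            'DISK_GB': {'total': total_disk_gb, 'reserved': 0, 'min_unit': 1,
                        'max_unit': max_unit_disk_gb, 'step_size': 1,
                        'allocation_ratio': disk_ratio},
        }

    # Reproduces the inventory logged for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2:
    inventory = build_inventory(total_vcpus=48, total_ram_mb=196590, total_disk_gb=400)

A dict of this shape is what set_inventory_for_provider sends to Placement; the log entries below show the provider generation advancing from 0 to 2 as the inventory and traits are written.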
[ 495.683566] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd30eee-fc8a-4102-b2ad-afdf7f6a320d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.691293] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60673220-f185-4aef-8e9e-dcef547ce8c1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.721561] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445fa53e-bf19-40ce-9084-b623ad1805d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.728517] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98480195-4c80-4909-9d38-632b43688fdb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.741163] env[62503]: DEBUG nova.compute.provider_tree [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 496.278309] env[62503]: DEBUG nova.scheduler.client.report [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 496.278554] env[62503]: DEBUG nova.compute.provider_tree [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 0 to 1 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 496.278702] env[62503]: DEBUG nova.compute.provider_tree [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 496.327602] env[62503]: DEBUG nova.compute.provider_tree [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Updating 
resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 1 to 2 during operation: update_traits {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 496.832663] env[62503]: DEBUG nova.compute.resource_tracker [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 496.833101] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.942s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 496.833240] env[62503]: DEBUG nova.service [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Creating RPC server for service compute {{(pid=62503) start /opt/stack/nova/nova/service.py:186}} [ 496.847221] env[62503]: DEBUG nova.service [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] Join ServiceGroup membership for this service compute {{(pid=62503) start /opt/stack/nova/nova/service.py:203}} [ 496.847478] env[62503]: DEBUG nova.servicegroup.drivers.db [None req-d9f18745-73c6-4c7f-989c-df5fcb79b53c None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62503) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 507.851084] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 508.354473] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Getting list of instances from cluster (obj){ [ 508.354473] env[62503]: value = "domain-c8" [ 508.354473] env[62503]: _type = "ClusterComputeResource" [ 508.354473] env[62503]: } {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 508.355713] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a217c264-6e04-40aa-ba04-243804f84712 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.365259] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Got total of 0 instances {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 508.365478] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 508.365787] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Getting list of instances from cluster (obj){ [ 508.365787] env[62503]: value = "domain-c8" [ 508.365787] env[62503]: _type = "ClusterComputeResource" [ 508.365787] env[62503]: } {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 508.366614] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fd26a2-2b12-4b7b-b874-299cf315f9c0 
{{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.373368] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Got total of 0 instances {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 534.146598] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquiring lock "a62798a5-37ba-45be-be56-76e19ce3e189" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.146598] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "a62798a5-37ba-45be-be56-76e19ce3e189" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.650695] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 534.721236] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquiring lock "d88c42de-dafe-4bb1-bd56-a770524529f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.721484] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "d88c42de-dafe-4bb1-bd56-a770524529f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.227323] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 535.242223] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.242223] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.242223] env[62503]: INFO nova.compute.claims [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.413201] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.414146] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.766431] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.791124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "f8142528-e04c-444a-a252-84e98cecee74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.791414] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "f8142528-e04c-444a-a252-84e98cecee74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.824201] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquiring lock "b570c2ad-18ac-42ea-bc2e-009992ece3fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.824849] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "b570c2ad-18ac-42ea-bc2e-009992ece3fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.918055] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 536.295988] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 536.329528] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 536.414825] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1765c41d-b84d-4e54-8bfb-f8d13a336425 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.431106] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4853c57f-6339-43b4-a150-f00f8f025065 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.473126] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.474125] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8282dd2d-095a-4e80-9051-0ac7a663c8f7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.479687] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquiring lock "0b1ead99-af3b-41a6-8354-bc451a51133c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.479906] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "0b1ead99-af3b-41a6-8354-bc451a51133c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.487509] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92ad148-5d1e-41c2-b17b-41a7a18be609 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.509393] env[62503]: DEBUG nova.compute.provider_tree [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.827156] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.859298] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 
tempest-TenantUsagesTestJSON-2099757901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.985547] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 537.016481] env[62503]: DEBUG nova.scheduler.client.report [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 537.050574] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "18c503dc-6283-4489-a69c-8dead1ec3a0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.050802] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "18c503dc-6283-4489-a69c-8dead1ec3a0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.522497] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.523137] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 537.527782] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.761s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.529035] env[62503]: INFO nova.compute.claims [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.539085] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.553345] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 538.037771] env[62503]: DEBUG nova.compute.utils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.040794] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 538.041083] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 538.095898] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.549991] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 538.697614] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f8dbd6-731f-466a-9cf2-7f2379e9580a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.714442] env[62503]: DEBUG nova.policy [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea6f93f9842f46a9b5e66c403870e282', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb7277d01405409f86cb1893266b383d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 538.722366] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5278fd-4dcc-4109-a116-f603aa31d9b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.757945] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa3eaf9-b6d2-44fb-887c-342f0eaf98b3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.766064] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd58fbe-16f2-4a7d-ac25-ac8de3a6551e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.785515] env[62503]: DEBUG nova.compute.provider_tree [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.290755] env[62503]: DEBUG nova.scheduler.client.report [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 539.563367] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 539.606398] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.606645] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.606760] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.606971] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.609232] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.609447] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.609712] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.609855] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.610310] 
env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.610483] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.610679] env[62503]: DEBUG nova.virt.hardware [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.611868] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea0a7cb-5a30-4ed6-b88f-b2e933a30648 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.622680] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9fd521-ef85-4e43-8884-222420ab8a7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.644420] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c82f76-1664-4b6f-b2c4-054cb4890748 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.799222] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.800260] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 539.804487] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.331s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.807032] env[62503]: INFO nova.compute.claims [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.977546] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Successfully created port: b1294331-b052-4254-aac2-3ccceb5a52ef {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.311265] env[62503]: DEBUG nova.compute.utils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.318893] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 540.319802] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 540.659707] env[62503]: DEBUG nova.policy [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc2b172ede82487ea90ae3b65bc60340', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f14f4b380bb47f09316223e2318b0c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 540.820726] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 540.999616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a13b5a0-d90d-4886-909d-999121c2f2ff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.010478] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3431a440-bb25-4a4f-ad1a-4410b21c3baf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.048761] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5463dd29-8312-4ec9-8f0d-f43af0a08b75 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.061062] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccea7da-977e-4999-8350-866b990ac1b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.077143] env[62503]: DEBUG nova.compute.provider_tree [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.360045] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "24e6abd6-fb6f-49ba-b01b-3977ff205fef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.360501] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "24e6abd6-fb6f-49ba-b01b-3977ff205fef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.583952] env[62503]: DEBUG nova.scheduler.client.report [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 541.837647] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 541.863329] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 541.869233] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.869407] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.869574] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.869755] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.869902] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.870072] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 541.870462] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 541.870462] env[62503]: DEBUG nova.virt.hardware 
[None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 541.870626] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 541.870779] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 541.870946] env[62503]: DEBUG nova.virt.hardware [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 541.872116] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc60402e-1147-4f99-8b4d-e458f11fa234 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.885157] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbcc27f-7a4a-4c33-93f1-74dd275ac971 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.090551] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.091109] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 542.095837] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.269s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.100474] env[62503]: INFO nova.compute.claims [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.400009] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.606673] env[62503]: DEBUG nova.compute.utils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.611180] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Not allocating networking since 'none' was specified. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 542.866236] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Successfully created port: fe41d699-e1a6-4055-af31-ce12305bcc1c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 543.113796] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 543.306572] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0711038-9432-44d4-b6d3-3c84d9f3e322 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.318402] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ab593-6550-4c70-a64b-5299282be918 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.349896] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0c5634-b075-433b-b616-a55cc6efdf1f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.357568] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4d0673-a145-4544-aee4-5a3514898f5d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.372540] env[62503]: DEBUG nova.compute.provider_tree [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.429931] env[62503]: ERROR nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. 
[ 543.429931] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 543.429931] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.429931] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.429931] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.429931] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.429931] env[62503]: ERROR nova.compute.manager raise self.value [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.429931] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.429931] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.429931] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.431592] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.431592] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.431592] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. 
[ 543.431592] env[62503]: ERROR nova.compute.manager [ 543.431592] env[62503]: Traceback (most recent call last): [ 543.431592] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.431592] env[62503]: listener.cb(fileno) [ 543.431592] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.431592] env[62503]: result = function(*args, **kwargs) [ 543.431592] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.431592] env[62503]: return func(*args, **kwargs) [ 543.431592] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 543.431592] env[62503]: raise e [ 543.431592] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 543.431592] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 543.431592] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.431592] env[62503]: created_port_ids = self._update_ports_for_instance( [ 543.431592] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.431592] env[62503]: with excutils.save_and_reraise_exception(): [ 543.431592] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.431592] env[62503]: self.force_reraise() [ 543.431592] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.431592] env[62503]: raise self.value [ 543.431592] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.431592] env[62503]: updated_port = self._update_port( [ 543.431592] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.431592] env[62503]: _ensure_no_port_binding_failure(port) [ 543.431592] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.431592] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.433257] env[62503]: nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. [ 543.433257] env[62503]: Removing descriptor: 14 [ 543.433257] env[62503]: ERROR nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. 
[ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Traceback (most recent call last): [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] yield resources [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.driver.spawn(context, instance, image_meta, [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.433257] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] vm_ref = self.build_virtual_machine(instance, [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] for vif in network_info: [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self._sync_wrapper(fn, *args, **kwargs) [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.wait() [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self[:] = self._gt.wait() [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self._exit_event.wait() [ 543.441286] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.443310] env[62503]: ERROR 
nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] result = hub.switch() [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self.greenlet.switch() [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] result = function(*args, **kwargs) [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return func(*args, **kwargs) [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise e [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] nwinfo = self.network_api.allocate_for_instance( [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.443310] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] created_port_ids = self._update_ports_for_instance( [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] with excutils.save_and_reraise_exception(): [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.force_reraise() [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise self.value [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] updated_port = self._update_port( [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.443665] 
env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] _ensure_no_port_binding_failure(port) [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.443665] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise exception.PortBindingFailed(port_id=port['id']) [ 543.443959] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. [ 543.443959] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] [ 543.443959] env[62503]: INFO nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Terminating instance [ 543.443959] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquiring lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.443959] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquired lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.443959] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.878716] env[62503]: DEBUG nova.scheduler.client.report [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 543.985801] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.126120] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 544.156753] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.156832] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.158145] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.159412] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.159412] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.159412] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.159412] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 544.160181] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.160181] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.160181] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.160768] env[62503]: DEBUG nova.virt.hardware [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.161750] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323e7aa8-a06c-4394-b601-037523dca5fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.172765] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.184914] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd47b4c-99ed-4249-b970-818c0d3a9735 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.205208] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 544.219030] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 544.222397] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13040b34-efcb-4fed-a47e-ce4f16a308bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.233037] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Created folder: OpenStack in parent group-v4. [ 544.235367] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating folder: Project (40321110bfbf462991c312faeda53542). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 544.235367] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d1b51c0-3c96-45af-97be-f02b1a29e79b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.247838] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Created folder: Project (40321110bfbf462991c312faeda53542) in parent group-v294540. [ 544.248306] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating folder: Instances. Parent ref: group-v294541. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 544.248771] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c1eda3e-2787-4704-81d3-ac2ba19ee6c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.261018] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Created folder: Instances in parent group-v294541. [ 544.261018] env[62503]: DEBUG oslo.service.loopingcall [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.261018] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 544.261018] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ee6d5f8-3417-46b0-a18e-bf06cdf762bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.288048] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 544.288048] env[62503]: value = "task-1387658" [ 544.288048] env[62503]: _type = "Task" [ 544.288048] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.298998] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387658, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.384699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.386114] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 544.388325] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.530s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.390277] env[62503]: INFO nova.compute.claims [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.586122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "78599fa1-be64-4797-92a9-ebc3a40b59a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.586380] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "78599fa1-be64-4797-92a9-ebc3a40b59a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.678513] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Releasing lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.679388] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 544.679388] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 544.679818] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76c706bd-a8f6-4a39-8dd4-c8fe7e58590e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.691130] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3221867-7ffd-42d5-8fc1-dc8a8056c9ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.720854] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a62798a5-37ba-45be-be56-76e19ce3e189 could not be found. [ 544.721310] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 544.721824] env[62503]: INFO nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.722220] env[62503]: DEBUG oslo.service.loopingcall [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.723115] env[62503]: DEBUG nova.compute.manager [-] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 544.723115] env[62503]: DEBUG nova.network.neutron [-] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 544.785033] env[62503]: DEBUG nova.network.neutron [-] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.801171] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387658, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.895329] env[62503]: DEBUG nova.compute.utils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.900159] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 544.900772] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 545.084474] env[62503]: DEBUG nova.policy [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '651b5babbd204dba8928d697353a33b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87268f38d19e4ea485502b201120a947', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 545.091221] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 545.173545] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.173889] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.174093] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 545.174217] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 545.259785] env[62503]: DEBUG nova.compute.manager [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Received event network-changed-b1294331-b052-4254-aac2-3ccceb5a52ef {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 545.260097] env[62503]: DEBUG nova.compute.manager [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Refreshing instance network info cache due to event network-changed-b1294331-b052-4254-aac2-3ccceb5a52ef. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 545.260427] env[62503]: DEBUG oslo_concurrency.lockutils [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] Acquiring lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.260648] env[62503]: DEBUG oslo_concurrency.lockutils [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] Acquired lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.261354] env[62503]: DEBUG nova.network.neutron [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Refreshing network info cache for port b1294331-b052-4254-aac2-3ccceb5a52ef {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 545.287127] env[62503]: DEBUG nova.network.neutron [-] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.299824] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387658, 'name': CreateVM_Task, 'duration_secs': 0.752246} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.300822] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 545.301897] env[62503]: DEBUG oslo_vmware.service [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac9785c-1819-40b8-a7ce-8be0580b8f04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.308894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.308894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.309232] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 545.309717] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c4b592c-f55e-4fcc-88c0-726ecd1da859 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.316920] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 545.316920] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e8b6b8-44e1-4985-d57f-daf025cdb101" [ 545.316920] env[62503]: _type = "Task" [ 545.316920] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.327303] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e8b6b8-44e1-4985-d57f-daf025cdb101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.402182] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 545.625147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.635382] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquiring lock "9f83ec50-5143-45e1-849a-5c441d2702e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.635382] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "9f83ec50-5143-45e1-849a-5c441d2702e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.663683] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e5031a-148e-4e5e-8138-0eec816f83e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.675478] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afad0c0-ae50-4884-bc55-471811a36985 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.680635] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 545.680635] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 545.680767] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 545.680828] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 545.681042] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: f8142528-e04c-444a-a252-84e98cecee74] Skipping network cache update for instance because it is Building. 
{{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 545.681249] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Didn't find any instances for network info cache update. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10310}} [ 545.681975] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.683614] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.683614] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.683614] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.683889] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.684196] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.684196] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 545.684578] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 545.718389] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.718949] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa3cfbb-3348-444c-9358-d35b0a2fb6aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.728143] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadf91ad-22ec-483f-b1f4-81dea601f6b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.745232] env[62503]: DEBUG nova.compute.provider_tree [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.790123] env[62503]: INFO nova.compute.manager [-] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Took 1.07 seconds to deallocate network for instance. 
[ 545.797959] env[62503]: DEBUG nova.compute.claims [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 545.797959] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.829521] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.829969] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 545.830078] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.830184] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.830616] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 545.830949] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61351763-5328-410e-85ec-6912d2531a20 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.846299] env[62503]: DEBUG nova.network.neutron [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.851357] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 545.851530] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 545.852385] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14c3a51-d0f5-4aae-8cbe-35e9f9959cb4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.862079] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-750f4293-fa06-49f7-b701-3369bb9ae48d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.867885] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 545.867885] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cf75dc-a2f1-c268-2882-1db4ff963039" [ 545.867885] env[62503]: _type = "Task" [ 545.867885] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.877781] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cf75dc-a2f1-c268-2882-1db4ff963039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.903799] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.904142] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.138604] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 546.227430] env[62503]: DEBUG nova.network.neutron [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.249270] env[62503]: DEBUG nova.scheduler.client.report [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 546.383215] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Preparing fetch location {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 546.383488] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating directory with path [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 546.383733] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39e8bde9-ebae-411c-886c-7e05e8b6784d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.407532] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Created directory with path [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 546.407532] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Fetch image to [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 546.407532] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Downloading image file data 8150ca02-f879-471d-8913-459408f127a1 to [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk 
on the data store datastore1 {{(pid=62503) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 546.407988] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92db6301-f6e8-49f5-b788-460d8693912d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.415515] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5f53ea-678f-4686-88d4-8c91b4fc700c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.421794] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 546.435167] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8923fbaa-d93c-4c5d-994d-2ab48217bf14 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.472298] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c14b69-73e0-4f3a-b48b-55f38e7c8737 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.478683] env[62503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-26d1a329-b8ba-4618-9eb8-3e68607c8824 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.482938] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.483692] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.484122] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 546.487315] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.488143] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.488143] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.488143] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.488143] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.488268] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.488492] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.488492] env[62503]: DEBUG nova.virt.hardware [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.489350] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d1ab89-ec4c-4db2-84b3-d264e2ef8e53 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.504635] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869ebe13-02d6-471f-bbca-aaca83943f2f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.522272] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 
tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Downloading image file data 8150ca02-f879-471d-8913-459408f127a1 to the data store datastore1 {{(pid=62503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 546.608229] env[62503]: DEBUG oslo_vmware.rw_handles [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 546.696325] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.730218] env[62503]: DEBUG oslo_concurrency.lockutils [req-aaf04816-c12b-4595-8f4d-ca7bf81d9e1c req-e1f676a5-6083-4057-b964-6ba14f198e33 service nova] Releasing lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.754819] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.756422] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 546.758323] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.221s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.760368] env[62503]: INFO nova.compute.claims [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.832169] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Successfully created port: bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.215311] env[62503]: ERROR nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. [ 547.215311] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 547.215311] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.215311] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.215311] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.215311] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.215311] env[62503]: ERROR nova.compute.manager raise self.value [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.215311] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.215311] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.215311] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.216369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.216369] env[62503]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.216369] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. [ 547.216369] env[62503]: ERROR nova.compute.manager [ 547.216369] env[62503]: Traceback (most recent call last): [ 547.216369] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.216369] env[62503]: listener.cb(fileno) [ 547.216369] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.216369] env[62503]: result = function(*args, **kwargs) [ 547.216369] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.216369] env[62503]: return func(*args, **kwargs) [ 547.216369] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 547.216369] env[62503]: raise e [ 547.216369] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 547.216369] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 547.216369] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.216369] env[62503]: created_port_ids = self._update_ports_for_instance( [ 547.216369] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.216369] env[62503]: with excutils.save_and_reraise_exception(): [ 547.216369] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.216369] env[62503]: self.force_reraise() [ 547.216369] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.216369] env[62503]: raise self.value [ 547.216369] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.216369] env[62503]: updated_port = self._update_port( [ 547.216369] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.216369] env[62503]: _ensure_no_port_binding_failure(port) [ 547.216369] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.216369] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.220533] env[62503]: nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. [ 547.220533] env[62503]: Removing descriptor: 16 [ 547.220533] env[62503]: ERROR nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. 
[ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Traceback (most recent call last): [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] yield resources [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.driver.spawn(context, instance, image_meta, [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.220533] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] vm_ref = self.build_virtual_machine(instance, [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] for vif in network_info: [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self._sync_wrapper(fn, *args, **kwargs) [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.wait() [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self[:] = self._gt.wait() [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self._exit_event.wait() [ 547.220838] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.223841] env[62503]: ERROR 
nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] result = hub.switch() [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self.greenlet.switch() [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] result = function(*args, **kwargs) [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return func(*args, **kwargs) [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise e [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] nwinfo = self.network_api.allocate_for_instance( [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.223841] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] created_port_ids = self._update_ports_for_instance( [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] with excutils.save_and_reraise_exception(): [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.force_reraise() [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise self.value [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] updated_port = self._update_port( [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.224174] 
env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] _ensure_no_port_binding_failure(port) [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.224174] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise exception.PortBindingFailed(port_id=port['id']) [ 547.224492] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. [ 547.224492] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] [ 547.224492] env[62503]: INFO nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Terminating instance [ 547.224492] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquiring lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.224492] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquired lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.224492] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.267740] env[62503]: DEBUG nova.compute.utils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.273737] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 547.273737] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 547.282774] env[62503]: DEBUG oslo_vmware.rw_handles [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Completed reading data from the image iterator. 
{{(pid=62503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 547.282774] env[62503]: DEBUG oslo_vmware.rw_handles [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 547.418498] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Downloaded image file data 8150ca02-f879-471d-8913-459408f127a1 to vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk on the data store datastore1 {{(pid=62503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 547.420417] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Caching image {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 547.420504] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Copying Virtual Disk [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk to [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 547.420766] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a620e0e0-82f9-4814-9d90-83bcd03afe37 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.428558] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 547.428558] env[62503]: value = "task-1387659" [ 547.428558] env[62503]: _type = "Task" [ 547.428558] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.436842] env[62503]: DEBUG nova.policy [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '243cc345d6bf4257a28f39dcd8c187d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '776450ee7bcf43b5a901d555f7e14655', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 547.444413] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.770583] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.773289] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 547.943099] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387659, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.986446] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4b1e6e-2abe-4b14-9f1e-65f83589b289 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.994555] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5671350-ccc8-462a-834d-bfc2ad4df08c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.039726] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e836abb-46fd-40bc-84b5-c0a82c714039 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.048576] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ed90e0-6d33-4064-bf13-047496e41573 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.062596] env[62503]: DEBUG nova.compute.provider_tree [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.083116] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "59fd4a4a-20f5-4b8f-970a-acfc882f45a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.083361] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "59fd4a4a-20f5-4b8f-970a-acfc882f45a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.171186] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.443172] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65719} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.443474] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Copied Virtual Disk [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk to [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 548.443633] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleting the datastore file [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 548.443886] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe1994e9-4146-40ea-8e8d-65b5c3605d94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.452078] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 548.452078] env[62503]: value = "task-1387660" [ 548.452078] env[62503]: _type = "Task" [ 548.452078] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.460805] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387660, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.566921] env[62503]: DEBUG nova.scheduler.client.report [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 548.674756] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Releasing lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.675229] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 548.675487] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 548.675805] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7082d640-a9a9-48bb-ac24-b2d71d50ef17 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.688587] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b659ca1f-1fe0-4f64-b7de-bec02d495696 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.718892] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d88c42de-dafe-4bb1-bd56-a770524529f3 could not be found. [ 548.719422] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 548.721069] env[62503]: INFO nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Took 0.04 seconds to destroy the instance on the hypervisor. 
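The PortBindingFailed tracebacks above for port fe41d699-e1a6-4055-af31-ce12305bcc1c all terminate in nova/network/neutron.py line 294, _ensure_no_port_binding_failure, which is why instance d88c42de-dafe-4bb1-bd56-a770524529f3 is then destroyed in the entries that follow. Below is a minimal sketch of the kind of check that frame implies, assuming Neutron reports a failed binding through the port's 'binding:vif_type' field; the field name and the 'binding_failed' sentinel are assumptions for illustration, and only the function name and the exception it raises are taken from the log itself.

# Sketch only: the 'binding:vif_type' key and the 'binding_failed' sentinel
# are assumptions; the function name and exception text come from the
# traceback entries above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port: dict) -> None:
    # If Neutron could not bind the port, Nova aborts network allocation,
    # which is what fails the instance build and triggers the destroy above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])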
[ 548.721069] env[62503]: DEBUG oslo.service.loopingcall [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.721181] env[62503]: DEBUG nova.compute.manager [-] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 548.721260] env[62503]: DEBUG nova.network.neutron [-] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 548.755860] env[62503]: DEBUG nova.compute.manager [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Received event network-changed-fe41d699-e1a6-4055-af31-ce12305bcc1c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 548.755860] env[62503]: DEBUG nova.compute.manager [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Refreshing instance network info cache due to event network-changed-fe41d699-e1a6-4055-af31-ce12305bcc1c. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 548.755860] env[62503]: DEBUG oslo_concurrency.lockutils [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] Acquiring lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.755860] env[62503]: DEBUG oslo_concurrency.lockutils [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] Acquired lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.755860] env[62503]: DEBUG nova.network.neutron [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Refreshing network info cache for port fe41d699-e1a6-4055-af31-ce12305bcc1c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 548.775318] env[62503]: DEBUG nova.network.neutron [-] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.790827] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 548.822490] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.822989] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.822989] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.823180] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.823324] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.823468] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.823673] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.823823] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.823983] env[62503]: DEBUG nova.virt.hardware [None 
req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.824195] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.824463] env[62503]: DEBUG nova.virt.hardware [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.826899] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f6c909-f6d0-425d-a9bf-3de8d16f0ee5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.834307] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3480ec94-42ec-4917-bc65-a2f3f79ab8fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.966616] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032886} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.966719] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.966964] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Moving file from [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d/8150ca02-f879-471d-8913-459408f127a1 to [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1. {{(pid=62503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 548.967281] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-bff1c0c2-d80e-4f45-9537-94e53bcfe313 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.975438] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 548.975438] env[62503]: value = "task-1387661" [ 548.975438] env[62503]: _type = "Task" [ 548.975438] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.985045] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387661, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.026938] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Successfully created port: 41990c02-c9ae-442d-acc0-e1d2d6642f80 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.075572] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.076428] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 549.081050] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.985s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.084300] env[62503]: INFO nova.compute.claims [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.262505] env[62503]: DEBUG nova.compute.manager [req-6f98be5f-1a40-4e3a-9bd8-92035c46dc8a req-2c0f66c9-31a6-43db-877f-50e3a09a9880 service nova] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Received event network-vif-deleted-b1294331-b052-4254-aac2-3ccceb5a52ef {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 549.278579] env[62503]: DEBUG nova.network.neutron [-] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.319462] env[62503]: DEBUG nova.network.neutron [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.486283] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387661, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.127168} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.489033] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] File moved {{(pid=62503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 549.489033] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Cleaning up location [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 549.489033] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleting the datastore file [datastore1] vmware_temp/47d6f6ac-3e07-44f8-8cf2-9cc21ff9d08d {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 549.489470] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7274416-87fc-4211-ab81-0f83dfc87e58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.494095] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquiring lock "7a0b2744-2bb0-4eee-9861-418ba67b719c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.494381] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "7a0b2744-2bb0-4eee-9861-418ba67b719c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.499921] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 549.499921] env[62503]: value = "task-1387662" [ 549.499921] env[62503]: _type = "Task" [ 549.499921] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.511103] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.590411] env[62503]: DEBUG nova.compute.utils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.594431] env[62503]: DEBUG nova.network.neutron [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.596823] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 549.597027] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 549.781479] env[62503]: INFO nova.compute.manager [-] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Took 1.06 seconds to deallocate network for instance. 
[ 549.787990] env[62503]: DEBUG nova.compute.claims [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 549.788276] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.845649] env[62503]: DEBUG nova.policy [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f96a671b4be485ea4afc11f85a831c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '519fdd826d274516b225a41d6c094af8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 550.013345] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029613} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.013728] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 550.015555] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-941d3fbd-fedc-4203-9039-efbd00dee2e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.023702] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 550.023702] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52000ee6-a9c2-5a6f-d68b-e095300039c4" [ 550.023702] env[62503]: _type = "Task" [ 550.023702] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.034770] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52000ee6-a9c2-5a6f-d68b-e095300039c4, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.034770] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.034770] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 90e42997-a34c-4a39-8d2f-7ab0ed19f028/90e42997-a34c-4a39-8d2f-7ab0ed19f028.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 550.034914] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22658f65-90bf-424d-acfe-da4095776085 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.041568] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 550.041568] env[62503]: value = "task-1387663" [ 550.041568] env[62503]: _type = "Task" [ 550.041568] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.050059] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387663, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.098045] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 550.117839] env[62503]: DEBUG oslo_concurrency.lockutils [req-1bf5c136-4a66-438f-92fd-4db9822b849e req-649af2ce-f1db-4057-8bfa-6b10cb91c126 service nova] Releasing lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.425687] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2aa125-53fe-42c3-a90b-7f82aac97c9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.439632] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8293e8b6-fa47-4c55-94d6-ab651f91607a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.480264] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694cd593-b609-41ee-ba5b-69424146154a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.489332] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05361e0-b240-4a40-b275-7443210e0b54 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.514348] env[62503]: DEBUG nova.compute.provider_tree [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.552736] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387663, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505289} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.553013] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 90e42997-a34c-4a39-8d2f-7ab0ed19f028/90e42997-a34c-4a39-8d2f-7ab0ed19f028.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 550.553315] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 550.553639] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b544cf3d-ec30-4311-85c7-48ca9805f7a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.562214] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 550.562214] env[62503]: value = "task-1387664" [ 550.562214] env[62503]: _type = "Task" [ 550.562214] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.572192] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387664, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.020648] env[62503]: DEBUG nova.scheduler.client.report [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 551.075354] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064983} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.075681] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 551.076662] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862c1b64-d90d-4d97-87ea-22b12fc717f9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.099378] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 90e42997-a34c-4a39-8d2f-7ab0ed19f028/90e42997-a34c-4a39-8d2f-7ab0ed19f028.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 551.099771] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fa3c820-1925-4d93-b28d-ded2bc8458a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.120763] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 551.122943] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 551.122943] env[62503]: value = "task-1387665" [ 551.122943] env[62503]: _type = "Task" [ 551.122943] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.132865] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387665, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.147071] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.147366] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.147527] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.147710] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.147851] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.147993] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.148224] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.148384] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.148572] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.148704] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.148873] env[62503]: DEBUG nova.virt.hardware [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.149758] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938ff593-a124-428d-8d5f-c189b9047625 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.159294] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5eecf0-a5d7-4d84-b192-66e0978f0c22 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.527746] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.529130] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 551.532498] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.133s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.534097] env[62503]: INFO nova.compute.claims [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.634694] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387665, 'name': ReconfigVM_Task, 'duration_secs': 0.286953} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.634694] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 90e42997-a34c-4a39-8d2f-7ab0ed19f028/90e42997-a34c-4a39-8d2f-7ab0ed19f028.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 551.635182] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e322a98-9a14-411f-b494-9d2476f76951 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.642516] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 551.642516] env[62503]: value = "task-1387666" [ 551.642516] env[62503]: _type = "Task" [ 551.642516] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.655645] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387666, 'name': Rename_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.696049] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Successfully created port: 85548955-452a-4861-a7b7-7b7a736f42d3 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.041735] env[62503]: DEBUG nova.compute.utils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.043865] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 552.044346] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 552.053224] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquiring lock "d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.053555] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.153761] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387666, 'name': Rename_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.256392] env[62503]: DEBUG nova.policy [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '858793651d2f47148e2e3485f96d28d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c8b0006ec874fccae43f59e907fa6ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 552.546260] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 552.655713] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387666, 'name': Rename_Task, 'duration_secs': 0.844531} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.658453] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 552.659831] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-796f27a1-5f66-42c5-937a-95fadd500c41 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.663881] env[62503]: ERROR nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. 
[ 552.663881] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.663881] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.663881] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.663881] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.663881] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.663881] env[62503]: ERROR nova.compute.manager raise self.value [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.663881] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.663881] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.663881] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.664366] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.664366] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.664366] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. 
[ 552.664366] env[62503]: ERROR nova.compute.manager [ 552.664366] env[62503]: Traceback (most recent call last): [ 552.664366] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.664366] env[62503]: listener.cb(fileno) [ 552.664366] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.664366] env[62503]: result = function(*args, **kwargs) [ 552.664366] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.664366] env[62503]: return func(*args, **kwargs) [ 552.664366] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 552.664366] env[62503]: raise e [ 552.664366] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.664366] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 552.664366] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.664366] env[62503]: created_port_ids = self._update_ports_for_instance( [ 552.664366] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.664366] env[62503]: with excutils.save_and_reraise_exception(): [ 552.664366] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.664366] env[62503]: self.force_reraise() [ 552.664366] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.664366] env[62503]: raise self.value [ 552.664366] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.664366] env[62503]: updated_port = self._update_port( [ 552.664366] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.664366] env[62503]: _ensure_no_port_binding_failure(port) [ 552.664366] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.664366] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.665102] env[62503]: nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. [ 552.665102] env[62503]: Removing descriptor: 14 [ 552.665102] env[62503]: ERROR nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. 
[ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] Traceback (most recent call last): [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] yield resources [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.driver.spawn(context, instance, image_meta, [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.665102] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] vm_ref = self.build_virtual_machine(instance, [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] for vif in network_info: [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self._sync_wrapper(fn, *args, **kwargs) [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.wait() [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self[:] = self._gt.wait() [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self._exit_event.wait() [ 552.665479] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.665823] env[62503]: ERROR 
nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] result = hub.switch() [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self.greenlet.switch() [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] result = function(*args, **kwargs) [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return func(*args, **kwargs) [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise e [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] nwinfo = self.network_api.allocate_for_instance( [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.665823] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] created_port_ids = self._update_ports_for_instance( [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] with excutils.save_and_reraise_exception(): [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.force_reraise() [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise self.value [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] updated_port = self._update_port( [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.666173] 
env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] _ensure_no_port_binding_failure(port) [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.666173] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise exception.PortBindingFailed(port_id=port['id']) [ 552.666463] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. [ 552.666463] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] [ 552.666463] env[62503]: INFO nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Terminating instance [ 552.670548] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 552.670548] env[62503]: value = "task-1387667" [ 552.670548] env[62503]: _type = "Task" [ 552.670548] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.671566] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.671908] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquired lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.672294] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.678913] env[62503]: ERROR nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. 
[ 552.678913] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.678913] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.678913] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.678913] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.678913] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.678913] env[62503]: ERROR nova.compute.manager raise self.value [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.678913] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.678913] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.678913] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.679676] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.679676] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.679676] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. 
[ 552.679676] env[62503]: ERROR nova.compute.manager [ 552.679676] env[62503]: Traceback (most recent call last): [ 552.679676] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.679676] env[62503]: listener.cb(fileno) [ 552.679676] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.679676] env[62503]: result = function(*args, **kwargs) [ 552.679676] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.679676] env[62503]: return func(*args, **kwargs) [ 552.679676] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 552.679676] env[62503]: raise e [ 552.679676] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.679676] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 552.679676] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.679676] env[62503]: created_port_ids = self._update_ports_for_instance( [ 552.679676] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.679676] env[62503]: with excutils.save_and_reraise_exception(): [ 552.679676] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.679676] env[62503]: self.force_reraise() [ 552.679676] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.679676] env[62503]: raise self.value [ 552.679676] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.679676] env[62503]: updated_port = self._update_port( [ 552.679676] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.679676] env[62503]: _ensure_no_port_binding_failure(port) [ 552.679676] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.679676] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.680778] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. [ 552.680778] env[62503]: Removing descriptor: 16 [ 552.682019] env[62503]: ERROR nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. 
[ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Traceback (most recent call last): [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] yield resources [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.driver.spawn(context, instance, image_meta, [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] vm_ref = self.build_virtual_machine(instance, [ 552.682019] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] for vif in network_info: [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self._sync_wrapper(fn, *args, **kwargs) [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.wait() [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self[:] = self._gt.wait() [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self._exit_event.wait() [ 552.682381] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.682381] env[62503]: ERROR 
nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] result = hub.switch() [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self.greenlet.switch() [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] result = function(*args, **kwargs) [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return func(*args, **kwargs) [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise e [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] nwinfo = self.network_api.allocate_for_instance( [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] created_port_ids = self._update_ports_for_instance( [ 552.682824] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] with excutils.save_and_reraise_exception(): [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.force_reraise() [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise self.value [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] updated_port = self._update_port( [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.683197] 
env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] _ensure_no_port_binding_failure(port) [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise exception.PortBindingFailed(port_id=port['id']) [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. [ 552.683197] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] [ 552.683548] env[62503]: INFO nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Terminating instance [ 552.687732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquiring lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.688011] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquired lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.688219] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.698979] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387667, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.873502] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02ae327-f98a-4f18-89e9-c283e71f0d0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.881893] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e077ec-5044-43ff-bac9-60d2a94e870a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.934838] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b06d2a-d5fb-4352-82ad-d5945b9d0030 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.938141] env[62503]: DEBUG nova.compute.manager [req-84241bde-79e9-4479-9f86-b64801fd597b req-53470c79-ab8c-4f39-9f25-784a20d7c4ed service nova] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Received event network-vif-deleted-fe41d699-e1a6-4055-af31-ce12305bcc1c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 552.944420] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c4cef-47b3-471f-9da6-d33d522b6654 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.959365] env[62503]: DEBUG nova.compute.provider_tree [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.189159] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387667, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.245092] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.284092] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.335402] env[62503]: DEBUG nova.compute.manager [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Received event network-changed-bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 553.335402] env[62503]: DEBUG nova.compute.manager [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Refreshing instance network info cache due to event network-changed-bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 553.335402] env[62503]: DEBUG oslo_concurrency.lockutils [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] Acquiring lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.386112] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.462817] env[62503]: DEBUG nova.scheduler.client.report [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 553.562894] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 553.603526] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.603818] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.604149] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.604554] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.604554] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.604554] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.604705] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.604785] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.605009] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a 
tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.605263] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.605563] env[62503]: DEBUG nova.virt.hardware [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.608603] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d017509-dbe9-4d52-af11-ecc9890b37f4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.619013] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6756d246-ec81-4b73-b76a-195cc781ae36 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.684800] env[62503]: DEBUG oslo_vmware.api [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387667, 'name': PowerOnVM_Task, 'duration_secs': 0.545298} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.686551] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 553.687648] env[62503]: INFO nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Took 9.56 seconds to spawn the instance on the hypervisor. 
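The PowerOnVM_Task entries above (task-1387667, polled at 66% then 100% before completing in 0.545s) are driven by oslo.vmware's task handling: the driver invokes the vSphere API, receives a Task managed-object reference, and wait_for_task polls it until it reports success. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and a VM moref; this is an illustration, not Nova's vm_util helper:

    # Illustrative only (not Nova's vm_util code): issue a power-on and block on
    # the resulting vSphere task, as the log does for task-1387667 above.
    # `session` is an existing oslo_vmware.api.VMwareAPISession and `vm_ref` a
    # VM managed-object reference; both are assumed to exist already.
    def power_on_and_wait(session, vm_ref):
        # PowerOnVM_Task returns a Task moref instead of blocking.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task state (the "_poll_task ... progress is
        # 66%" / "100%" lines above) and raises if the task ends in error.
        return session.wait_for_task(task)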
[ 553.687648] env[62503]: DEBUG nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 553.689119] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fe8bff-47dd-4ecd-b7be-aafa3da93434 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.722077] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.888749] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Releasing lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.889221] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 553.891016] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 553.891016] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecb89d29-b5e7-449e-82cb-e8436d10463f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.899807] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7aa289-0a1d-47f3-bdbf-eee39a3f4c7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.925988] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b570c2ad-18ac-42ea-bc2e-009992ece3fe could not be found. 
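The WARNING just above is the expected outcome when a spawn aborted before the backend VM was ever created: the teardown path treats InstanceNotFound as "nothing to remove" and carries on with network and claim cleanup, which is why the very next entries report the instance destroyed in a fraction of a second. A simplified sketch of that tolerance using the generic virt-driver call rather than the vmops code path that produced the log line:

    # Simplified sketch of the pattern shown above; not the vmops code that
    # logged the warning. `driver`, `context`, `instance` and `network_info`
    # are assumed to come from the surrounding compute-manager flow.
    from nova import exception

    def destroy_best_effort(driver, context, instance, network_info):
        try:
            driver.destroy(context, instance, network_info)
        except exception.InstanceNotFound:
            # The VM never reached the hypervisor (spawn failed on port
            # binding), so there is nothing to remove; continue with network
            # deallocation and claim abort.
            pass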
[ 553.926262] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.927347] env[62503]: INFO nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 553.927347] env[62503]: DEBUG oslo.service.loopingcall [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.927347] env[62503]: DEBUG nova.compute.manager [-] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 553.927347] env[62503]: DEBUG nova.network.neutron [-] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.970930] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.974292] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 553.978489] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.349s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.979194] env[62503]: INFO nova.compute.claims [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.982835] env[62503]: DEBUG nova.network.neutron [-] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.083049] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Successfully created port: ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.217620] env[62503]: INFO nova.compute.manager [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Took 17.79 seconds to build instance. [ 554.228894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Releasing lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.228894] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 554.228894] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 554.228894] env[62503]: DEBUG oslo_concurrency.lockutils [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] Acquired lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.228894] env[62503]: DEBUG nova.network.neutron [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Refreshing network info cache for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 554.229689] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-390019c3-dbad-4d43-aa58-cf68a4e4e469 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.249048] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c6a640-9f74-422b-a726-c18acd08e3c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.284531] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f8142528-e04c-444a-a252-84e98cecee74 could not be found. 
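The Acquiring / Acquired / Releasing entries around "refresh_cache-<uuid>" and "compute_resources", with their waited/held timings, come from oslo.concurrency's lockutils. A minimal sketch of the two usage forms; the lock names mirror the log but the functions themselves are hypothetical:

    # Minimal sketch of the lockutils patterns behind the lock lines above;
    # the decorated/guarded functions here are placeholders, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the named lock held; lockutils emits the "acquired ...
        # waited Ns" and "released ... held Ns" DEBUG lines seen above.
        return instance_uuid

    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-' + instance_uuid):
            # Matches the Acquiring/Acquired/Releasing lock
            # "refresh_cache-<uuid>" entries in the log.
            pass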
[ 554.284801] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 554.284984] env[62503]: INFO nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Took 0.06 seconds to destroy the instance on the hypervisor. [ 554.285563] env[62503]: DEBUG oslo.service.loopingcall [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.285811] env[62503]: DEBUG nova.compute.manager [-] [instance: f8142528-e04c-444a-a252-84e98cecee74] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 554.285904] env[62503]: DEBUG nova.network.neutron [-] [instance: f8142528-e04c-444a-a252-84e98cecee74] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 554.333470] env[62503]: DEBUG nova.network.neutron [-] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.483679] env[62503]: DEBUG nova.compute.utils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.485338] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 554.485550] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 554.490969] env[62503]: DEBUG nova.network.neutron [-] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.604518] env[62503]: DEBUG nova.policy [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4749f588ad234b61b7858dbf572e38af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8becefa30331447a9761794747672bd7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 554.725198] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b7a3a73-1934-490b-9fc5-04cb3dcd618e tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.311s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.839278] env[62503]: DEBUG nova.network.neutron [-] [instance: f8142528-e04c-444a-a252-84e98cecee74] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.926543] env[62503]: DEBUG nova.network.neutron [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 555.002909] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 555.008328] env[62503]: INFO nova.compute.manager [-] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Took 1.08 seconds to deallocate network for instance. 
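The "Policy check for network:attach_external_network failed" DEBUG above is normal for the member/reader tempest credentials: it only excludes external networks from what this user may attach to, it does not by itself fail the boot. A rough oslo.policy sketch of that kind of check; the admin-only default rule string and the credential dict are assumptions for illustration, not read from Nova's policy files:

    # Rough oslo.policy sketch of the check logged above; the default rule and
    # credentials are illustrative assumptions.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'is_admin:True'))

    creds = {'roles': ['member', 'reader'], 'is_admin': False,
             'project_id': '8becefa30331447a9761794747672bd7'}
    # Returns False (no exception) for a non-admin caller, which Nova merely
    # logs at DEBUG as in the line above before filtering networks.
    print(enforcer.enforce('network:attach_external_network', {}, creds))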
[ 555.013251] env[62503]: DEBUG nova.compute.claims [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 555.013704] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.212713] env[62503]: DEBUG nova.network.neutron [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.232478] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 555.240544] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3b5e5a-82dc-4de7-8fb6-1ec392bce201 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.249100] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7982eff-390c-4dfc-b658-162df045a2a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.282918] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cc5dd8-e99f-4eca-bda0-db1c64695961 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.290827] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f70320c-2066-4fe7-abde-fdaa59bbb228 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.309507] env[62503]: DEBUG nova.compute.provider_tree [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.341768] env[62503]: INFO nova.compute.manager [-] [instance: f8142528-e04c-444a-a252-84e98cecee74] Took 1.06 seconds to deallocate network for instance. 
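Every failed spawn in this section bottoms out in the same check at nova/network/neutron.py:294: after updating the port, Nova inspects the binding that Neutron reports and raises PortBindingFailed if the bind did not succeed, which then unwinds through _allocate_network_async and aborts the build. A sketch reconstructed from the tracebacks above, not copied from the Nova source:

    # Reconstructed from the tracebacks above (not Nova's actual source): raise
    # PortBindingFailed when Neutron reports the port binding as failed, which
    # is exactly the error that aborts each spawn in this log.
    from nova import exception

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])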
[ 555.347332] env[62503]: DEBUG nova.compute.claims [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 555.347523] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.510677] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Successfully created port: f85bee01-59b6-479d-9ed7-a1360aedabcf {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.717921] env[62503]: DEBUG oslo_concurrency.lockutils [req-00e70bba-d95c-4429-8f29-10eb6abe2ab1 req-682870a9-ca5c-40f9-8ca5-9f9e1576ea37 service nova] Releasing lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.765405] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.812752] env[62503]: DEBUG nova.scheduler.client.report [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 556.021476] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 556.036059] env[62503]: ERROR nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. 
[ 556.036059] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 556.036059] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.036059] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.036059] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.036059] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.036059] env[62503]: ERROR nova.compute.manager raise self.value [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.036059] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 556.036059] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.036059] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 556.036668] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.036668] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 556.036668] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. 
[ 556.036668] env[62503]: ERROR nova.compute.manager [ 556.036668] env[62503]: Traceback (most recent call last): [ 556.036668] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 556.036668] env[62503]: listener.cb(fileno) [ 556.036668] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.036668] env[62503]: result = function(*args, **kwargs) [ 556.036668] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 556.036668] env[62503]: return func(*args, **kwargs) [ 556.036668] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 556.036668] env[62503]: raise e [ 556.036668] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 556.036668] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 556.036668] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.036668] env[62503]: created_port_ids = self._update_ports_for_instance( [ 556.036668] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.036668] env[62503]: with excutils.save_and_reraise_exception(): [ 556.036668] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.036668] env[62503]: self.force_reraise() [ 556.036668] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.036668] env[62503]: raise self.value [ 556.036668] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.036668] env[62503]: updated_port = self._update_port( [ 556.036668] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.036668] env[62503]: _ensure_no_port_binding_failure(port) [ 556.036668] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.036668] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 556.037360] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. [ 556.037360] env[62503]: Removing descriptor: 18 [ 556.037360] env[62503]: ERROR nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. 
[ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Traceback (most recent call last): [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] yield resources [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.driver.spawn(context, instance, image_meta, [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.037360] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] vm_ref = self.build_virtual_machine(instance, [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] for vif in network_info: [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self._sync_wrapper(fn, *args, **kwargs) [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.wait() [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self[:] = self._gt.wait() [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self._exit_event.wait() [ 556.037656] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.038018] env[62503]: ERROR 
nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] result = hub.switch() [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self.greenlet.switch() [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] result = function(*args, **kwargs) [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return func(*args, **kwargs) [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise e [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] nwinfo = self.network_api.allocate_for_instance( [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.038018] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] created_port_ids = self._update_ports_for_instance( [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] with excutils.save_and_reraise_exception(): [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.force_reraise() [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise self.value [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] updated_port = self._update_port( [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.038334] 
env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] _ensure_no_port_binding_failure(port) [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.038334] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise exception.PortBindingFailed(port_id=port['id']) [ 556.038748] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. [ 556.038748] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] [ 556.038748] env[62503]: INFO nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Terminating instance [ 556.041478] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquiring lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.041646] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquired lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.041815] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.055137] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.055407] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 
tempest-ServersAdminTestJSON-732764820-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.055567] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.055749] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.055893] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.059746] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.059746] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.059746] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.059996] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.060123] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.060289] env[62503]: DEBUG nova.virt.hardware [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.061227] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d02c217-9892-4afb-ba46-f3adf53f3d76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.078836] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-73a86192-800d-48c6-bfd0-eac26b8f16ff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.086682] env[62503]: DEBUG nova.compute.manager [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Received event network-changed-41990c02-c9ae-442d-acc0-e1d2d6642f80 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 556.086873] env[62503]: DEBUG nova.compute.manager [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Refreshing instance network info cache due to event network-changed-41990c02-c9ae-442d-acc0-e1d2d6642f80. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 556.087831] env[62503]: DEBUG oslo_concurrency.lockutils [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] Acquiring lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.088166] env[62503]: DEBUG oslo_concurrency.lockutils [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] Acquired lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.088166] env[62503]: DEBUG nova.network.neutron [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Refreshing network info cache for port 41990c02-c9ae-442d-acc0-e1d2d6642f80 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 556.232175] env[62503]: DEBUG nova.compute.manager [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: f8142528-e04c-444a-a252-84e98cecee74] Received event network-vif-deleted-bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 556.233129] env[62503]: DEBUG nova.compute.manager [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Received event network-changed-85548955-452a-4861-a7b7-7b7a736f42d3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 556.233129] env[62503]: DEBUG nova.compute.manager [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Refreshing instance network info cache due to event network-changed-85548955-452a-4861-a7b7-7b7a736f42d3. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 556.234025] env[62503]: DEBUG oslo_concurrency.lockutils [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] Acquiring lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.320614] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.320614] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 556.322714] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.605s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.323078] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.323370] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 556.324251] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.527s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.328574] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d051c9a4-f94f-4e94-bd61-8c780a233244 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.337742] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15506ca0-11bc-4a6a-9385-8ecf2c979a23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.355037] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8875ab99-410b-4335-bce4-b599f272d6cb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.365785] env[62503]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcc552b-a18c-47b6-ab43-77863bf5e6c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.401070] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181421MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 556.401070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.597992] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.682768] env[62503]: DEBUG nova.network.neutron [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.831022] env[62503]: DEBUG nova.compute.utils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.831022] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 556.831022] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 557.034208] env[62503]: DEBUG nova.policy [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4749f588ad234b61b7858dbf572e38af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8becefa30331447a9761794747672bd7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 557.092236] env[62503]: DEBUG nova.network.neutron [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.098580] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d14f69-f32e-41c2-8b25-5f345057be04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.116875] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14fedae-807b-49a1-822d-a00059f103d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.153417] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bd5132-2501-4029-8039-5ba49066a46d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.160967] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355b6bcc-b514-4077-8539-df45f324e3b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.176959] env[62503]: DEBUG nova.compute.provider_tree [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.201920] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.339513] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 
tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 557.595881] env[62503]: DEBUG oslo_concurrency.lockutils [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] Releasing lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.595881] env[62503]: DEBUG nova.compute.manager [req-fece7be0-e84e-405c-9e84-0e210104f7c9 req-b0113a57-d3b9-439e-96f7-2322b92da840 service nova] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Received event network-vif-deleted-41990c02-c9ae-442d-acc0-e1d2d6642f80 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 557.682341] env[62503]: DEBUG nova.scheduler.client.report [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 557.705176] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Releasing lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.706805] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 557.706805] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 557.707067] env[62503]: DEBUG oslo_concurrency.lockutils [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] Acquired lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.707832] env[62503]: DEBUG nova.network.neutron [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Refreshing network info cache for port 85548955-452a-4861-a7b7-7b7a736f42d3 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 557.709831] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-002db9f4-6908-46cc-bfcd-0b5cb3483854 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.723829] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba7db2-ad1a-4503-9fee-bf8f6bb2458d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.740033] env[62503]: DEBUG nova.compute.manager [None req-69d4c25e-2b1a-4c9d-b6ea-e56fdd315b42 tempest-ServerDiagnosticsV248Test-922741278 tempest-ServerDiagnosticsV248Test-922741278-project-admin] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 557.741989] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe56f100-2597-4d96-8303-9828992f123c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.748405] env[62503]: INFO nova.compute.manager [None req-69d4c25e-2b1a-4c9d-b6ea-e56fdd315b42 tempest-ServerDiagnosticsV248Test-922741278 tempest-ServerDiagnosticsV248Test-922741278-project-admin] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Retrieving diagnostics [ 557.754643] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83af46f0-5ac9-4894-b67c-9ffd8c630adc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.758477] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b1ead99-af3b-41a6-8354-bc451a51133c could not be found. 
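The WARNING above shows the teardown path tolerating a backing VM that never existed: the boot failed before spawn could create anything on the vCenter side, so destroy just logs the InstanceNotFound and moves on to network and claim cleanup. A minimal Python sketch of that pattern, with hypothetical helper callables rather than the real nova.virt.vmwareapi.vmops internals:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound in this sketch."""

    def destroy_backing_vm(lookup_vm_ref, delete_vm, instance_uuid):
        """Tear down a backing VM, tolerating the case where it was never created."""
        try:
            vm_ref = lookup_vm_ref(instance_uuid)   # hypothetical helper
            delete_vm(vm_ref)                       # hypothetical helper
        except InstanceNotFound:
            # The build failed before a VM was created on the backend, so there
            # is nothing to delete; log it and let network/claim cleanup continue.
            LOG.warning("Instance %s does not exist on backend", instance_uuid)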
[ 557.758772] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 557.758923] env[62503]: INFO nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 557.759179] env[62503]: DEBUG oslo.service.loopingcall [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.759419] env[62503]: DEBUG nova.compute.manager [-] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 557.759519] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 557.808619] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.185657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.862s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.187462] env[62503]: ERROR nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information.
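Every PortBindingFailed traceback in this run bottoms out in the same check: after asking Neutron to bind the port, Nova inspects the returned binding:vif_type and refuses to continue if it came back as 'binding_failed'. A rough, self-contained sketch of that check (the real version lives in nova/network/neutron.py as _ensure_no_port_binding_failure):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed in this sketch."""
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding through the binding:vif_type
        # attribute; 'binding_failed' means no mechanism driver could bind the
        # port on the requested host, so the boot cannot proceed.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # e.g. ensure_no_port_binding_failure({'id': '85548955-452a-4861-a7b7-7b7a736f42d3',
    #                                      'binding:vif_type': 'binding_failed'})  # raises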
[ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Traceback (most recent call last): [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.driver.spawn(context, instance, image_meta, [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] vm_ref = self.build_virtual_machine(instance, [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.187462] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] for vif in network_info: [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self._sync_wrapper(fn, *args, **kwargs) [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.wait() [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self[:] = self._gt.wait() [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self._exit_event.wait() [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] result = hub.switch() [ 558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
558.187808] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return self.greenlet.switch() [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] result = function(*args, **kwargs) [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] return func(*args, **kwargs) [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise e [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] nwinfo = self.network_api.allocate_for_instance( [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] created_port_ids = self._update_ports_for_instance( [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] with excutils.save_and_reraise_exception(): [ 558.188246] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] self.force_reraise() [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise self.value [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] updated_port = self._update_port( [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] _ensure_no_port_binding_failure(port) [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] raise exception.PortBindingFailed(port_id=port['id']) [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] nova.exception.PortBindingFailed: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. [ 558.188693] env[62503]: ERROR nova.compute.manager [instance: a62798a5-37ba-45be-be56-76e19ce3e189] [ 558.188995] env[62503]: DEBUG nova.compute.utils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 558.191485] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.495s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.193068] env[62503]: INFO nova.compute.claims [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.198924] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Build of instance a62798a5-37ba-45be-be56-76e19ce3e189 was re-scheduled: Binding failed for port b1294331-b052-4254-aac2-3ccceb5a52ef, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 558.199430] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 558.199657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquiring lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.200225] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Acquired lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.200225] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 558.272697] env[62503]: DEBUG nova.network.neutron [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.313022] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.350730] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 558.389108] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.389428] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.396236] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.396624] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.396698] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.397191] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.397191] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.397313] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.397424] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 
tempest-ServersAdminTestJSON-732764820-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.397699] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.397814] env[62503]: DEBUG nova.virt.hardware [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.399084] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eabfed6-ff4f-4844-99d4-b6abb13fc580 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.412135] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da128fed-ba2c-4b2c-9935-2d53d5b407d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.416973] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Successfully created port: b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.568164] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquiring lock "24b4c233-c874-452e-a7fc-492ca2a49a09" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.568164] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "24b4c233-c874-452e-a7fc-492ca2a49a09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.755303] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.816283] env[62503]: INFO nova.compute.manager [-] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Took 1.06 seconds to deallocate network for instance.
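The "Build topologies for 1 vcpu(s) 1:1:1" / "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines above are the degenerate case of the CPU topology search: with no flavor or image limits, Nova enumerates sockets*cores*threads factorizations of the vCPU count, and 1 vCPU only admits 1:1:1. A simplified, illustrative enumeration (not the actual nova.virt.hardware implementation):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations of the vCPU count."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # For the m1.nano flavor used here (vcpus=1) the only factorization is 1:1:1,
    # which is why exactly one topology is reported and then chosen.
    print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]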
[ 558.819157] env[62503]: DEBUG nova.compute.claims [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 558.819546] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.831852] env[62503]: DEBUG nova.network.neutron [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.988246] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.074987] env[62503]: ERROR nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. 
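"Instance failed network setup after 1 attempt(s)" is the retry wrapper around allocate_for_instance giving up: with a single allowed attempt the first failure is immediately re-raised into the build. A schematic version of that loop (simplified; the real code is _allocate_network_async in nova/compute/manager.py, and the single-attempt default is an assumption based on standard nova configuration):

    import time

    def allocate_network_with_retries(allocate, retries=0, retry_delay=1.0):
        """Call allocate(); retry up to `retries` extra times before giving up."""
        attempts = max(retries, 0) + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception:
                if attempt == attempts:
                    # Out of attempts: the caller logs "failed network setup
                    # after N attempt(s)" and re-raises the original error.
                    raise
                time.sleep(retry_delay)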
[ 559.074987] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 559.074987] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.074987] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.074987] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.074987] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.074987] env[62503]: ERROR nova.compute.manager raise self.value [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.074987] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 559.074987] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.074987] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 559.077985] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.077985] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 559.077985] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. 
[ 559.077985] env[62503]: ERROR nova.compute.manager [ 559.077985] env[62503]: Traceback (most recent call last): [ 559.077985] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 559.077985] env[62503]: listener.cb(fileno) [ 559.077985] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.077985] env[62503]: result = function(*args, **kwargs) [ 559.077985] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 559.077985] env[62503]: return func(*args, **kwargs) [ 559.077985] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 559.077985] env[62503]: raise e [ 559.077985] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 559.077985] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 559.077985] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.077985] env[62503]: created_port_ids = self._update_ports_for_instance( [ 559.077985] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.077985] env[62503]: with excutils.save_and_reraise_exception(): [ 559.077985] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.077985] env[62503]: self.force_reraise() [ 559.077985] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.077985] env[62503]: raise self.value [ 559.077985] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.077985] env[62503]: updated_port = self._update_port( [ 559.077985] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.077985] env[62503]: _ensure_no_port_binding_failure(port) [ 559.077985] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.077985] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 559.078828] env[62503]: nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. [ 559.078828] env[62503]: Removing descriptor: 20 [ 559.078828] env[62503]: ERROR nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. 
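Note how every spawn traceback fails at "for vif in network_info" even though the Neutron call ran in a separate greenthread: network_info is an asynchronous wrapper whose iteration waits on the allocation thread and re-raises whatever it raised, which is how the PortBindingFailed from _allocate_network_async surfaces inside get_vif_info. A minimal stand-in for that behaviour (the real class is nova.network.model.NetworkInfoAsyncWrapper, built on eventlet; a thread pool is used here only to keep the sketch self-contained):

    from concurrent.futures import ThreadPoolExecutor

    class AsyncNetworkInfo:
        """Defer network allocation; surface its result (or exception) on first use."""

        def __init__(self, allocate_fn, *args):
            self._future = ThreadPoolExecutor(max_workers=1).submit(allocate_fn, *args)
            self._vifs = None

        def _wait(self):
            if self._vifs is None:
                # result() re-raises anything the background allocation raised,
                # which is why PortBindingFailed appears under "for vif in network_info".
                self._vifs = self._future.result()
            return self._vifs

        def __iter__(self):
            return iter(self._wait())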
[ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Traceback (most recent call last): [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] yield resources [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.driver.spawn(context, instance, image_meta, [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.078828] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] vm_ref = self.build_virtual_machine(instance, [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] for vif in network_info: [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self._sync_wrapper(fn, *args, **kwargs) [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.wait() [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self[:] = self._gt.wait() [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self._exit_event.wait() [ 559.079193] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.079571] env[62503]: ERROR 
nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] result = hub.switch() [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self.greenlet.switch() [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] result = function(*args, **kwargs) [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return func(*args, **kwargs) [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise e [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] nwinfo = self.network_api.allocate_for_instance( [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.079571] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] created_port_ids = self._update_ports_for_instance( [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] with excutils.save_and_reraise_exception(): [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.force_reraise() [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise self.value [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] updated_port = self._update_port( [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.079962] 
env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] _ensure_no_port_binding_failure(port) [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.079962] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise exception.PortBindingFailed(port_id=port['id']) [ 559.080313] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. [ 559.080313] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] [ 559.080313] env[62503]: INFO nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Terminating instance [ 559.082209] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.082209] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquired lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.082209] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 559.335098] env[62503]: DEBUG oslo_concurrency.lockutils [req-b4ee822b-5e65-4222-b684-3f2df79a49a2 req-5924b813-5f53-4050-9e17-7977ba07bd1f service nova] Releasing lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.356637] env[62503]: DEBUG nova.compute.manager [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Received event network-changed-ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 559.357328] env[62503]: DEBUG nova.compute.manager [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Refreshing instance network info cache due to event network-changed-ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 559.357418] env[62503]: DEBUG oslo_concurrency.lockutils [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] Acquiring lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.430641] env[62503]: DEBUG nova.compute.manager [req-5139a218-9677-4cd4-aeb4-7fc1d0896334 req-b01177bf-214a-455f-a77d-9ef94aa0e291 service nova] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Received event network-vif-deleted-85548955-452a-4861-a7b7-7b7a736f42d3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 559.482358] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e371aa-6442-4597-8f3f-6a2097957861 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.490530] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eae1519-5e1c-49f9-8338-31d5b09fa969 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.494604] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Releasing lock "refresh_cache-a62798a5-37ba-45be-be56-76e19ce3e189" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.495788] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 559.495788] env[62503]: DEBUG nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 559.496049] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.535672] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a99df94-3f57-4da7-ba5f-8ec138107037 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.544877] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80994abe-da4d-4ceb-8b3f-d79ffc9d329f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.559986] env[62503]: DEBUG nova.compute.provider_tree [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.600957] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.636928] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.767539] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.063641] env[62503]: DEBUG nova.scheduler.client.report [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 560.106783] env[62503]: DEBUG nova.network.neutron [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.270809] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Releasing lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.271629] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 560.271629] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 560.271847] env[62503]: DEBUG oslo_concurrency.lockutils [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] Acquired lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.271957] env[62503]: DEBUG nova.network.neutron [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Refreshing network info cache for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 560.273152] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eada9b43-7724-4684-928e-a129d54a6d0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.284226] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad482bc3-2e05-448e-862a-43f20f518ff4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.316468] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18c503dc-6283-4489-a69c-8dead1ec3a0d could not be found. [ 560.317098] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 560.317173] env[62503]: INFO nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 560.317416] env[62503]: DEBUG oslo.service.loopingcall [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.317623] env[62503]: DEBUG nova.compute.manager [-] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 560.317718] env[62503]: DEBUG nova.network.neutron [-] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 560.346523] env[62503]: DEBUG nova.network.neutron [-] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.570535] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.570535] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 560.579604] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.791s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.611053] env[62503]: INFO nova.compute.manager [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] [instance: a62798a5-37ba-45be-be56-76e19ce3e189] Took 1.11 seconds to deallocate network for instance. [ 560.851031] env[62503]: DEBUG nova.network.neutron [-] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.882734] env[62503]: DEBUG nova.network.neutron [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.004145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquiring lock "20bf8c62-8b80-45c2-98d4-5a960f465aa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.004435] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "20bf8c62-8b80-45c2-98d4-5a960f465aa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.046450] env[62503]: DEBUG nova.network.neutron [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.080784] env[62503]: DEBUG nova.compute.utils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.082556] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 561.082748] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.290246] env[62503]: DEBUG nova.policy [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '960acb38f91840beb1e1906d22901495', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aec76ae10d55486ab3b463d99414f320', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 561.353731] env[62503]: INFO nova.compute.manager [-] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Took 1.04 seconds to deallocate network for instance. 
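The PortBindingFailed traceback above for instance 18c503dc-6283-4489-a69c-8dead1ec3a0d (and the near-identical ones that follow for other instances) bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. As a rough orientation aid, the check amounts to the following simplified reconstruction; it assumes Neutron returns the updated port as a plain dict and is not the verbatim Nova source:

    # Hedged sketch of the check referenced at nova/network/neutron.py:294 in the
    # traceback: when Neutron reports the port's binding:vif_type as "binding_failed",
    # Nova aborts the build with PortBindingFailed. Simplified reconstruction only.
    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant mirroring nova.network.model

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            # Produces the "Binding failed for port <id>, please check neutron logs
            # for more information." message seen throughout this log.
            raise exception.PortBindingFailed(port_id=port['id'])

Everything that follows the exception in the log is the normal teardown path: the instance is terminated, the backend reports InstanceNotFound because the VM was never built, the network is deallocated (instance_info_cache updated with network_info: []), and the resource claim is aborted.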
[ 561.360791] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d247c4b-1c8d-4fbf-933a-77cb7390bfeb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.363839] env[62503]: DEBUG nova.compute.claims [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 561.364093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.370199] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c39417f-a5e7-4e76-a7ba-3952d9df62ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.406134] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c7f151-f151-4cce-b557-47b2dd615e11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.414178] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1e7e58-2293-40e7-9af1-78f222455603 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.429281] env[62503]: DEBUG nova.compute.provider_tree [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.551784] env[62503]: DEBUG oslo_concurrency.lockutils [req-936a5aab-b91c-4388-aa4e-ffd4dc0598dc req-609f0691-a3b6-4cae-adf7-2a574acdf92b service nova] Releasing lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.590918] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 561.635514] env[62503]: ERROR nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. 
[ 561.635514] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 561.635514] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.635514] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.635514] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.635514] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.635514] env[62503]: ERROR nova.compute.manager raise self.value [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.635514] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.635514] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.635514] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.636036] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.636036] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.636036] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. 
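Both the nested traceback above and the raw greenthread traceback that follows pass through oslo_utils/excutils.py lines 227 and 200, i.e. the save_and_reraise_exception() context manager used inside _update_ports_for_instance. A minimal, self-contained illustration of that idiom follows; update_one_port and cleanup_port are hypothetical stand-ins, not Nova functions, and the local PortBindingFailed class exists only to keep the snippet runnable:

    # Hedged illustration of the oslo_utils save_and_reraise_exception idiom seen in
    # every traceback here: cleanup code runs inside the except path, then the original
    # exception is re-raised unchanged when the context manager exits.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Local stand-in for nova.exception.PortBindingFailed."""

    def update_one_port(port):
        # Hypothetical stand-in for Nova's _update_port: pretend Neutron rejected the binding.
        raise PortBindingFailed(f"Binding failed for port {port['id']}")

    def cleanup_port(port):
        # Hypothetical cleanup hook; Nova's real path unwinds the ports it created for the instance.
        print(f"cleaning up port {port['id']}")

    def update_ports(ports):
        for port in ports:
            try:
                update_one_port(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    cleanup_port(port)  # runs first; the PortBindingFailed is re-raised on exit

This is why the exception surfaces unchanged at the top of _allocate_network_async (manager.py line 2014, "raise e") and again in the compute manager's build path.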
[ 561.636036] env[62503]: ERROR nova.compute.manager [ 561.636036] env[62503]: Traceback (most recent call last): [ 561.636036] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.636036] env[62503]: listener.cb(fileno) [ 561.636036] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.636036] env[62503]: result = function(*args, **kwargs) [ 561.636036] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 561.636036] env[62503]: return func(*args, **kwargs) [ 561.636036] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 561.636036] env[62503]: raise e [ 561.636036] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 561.636036] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 561.636036] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.636036] env[62503]: created_port_ids = self._update_ports_for_instance( [ 561.636036] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.636036] env[62503]: with excutils.save_and_reraise_exception(): [ 561.636036] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.636036] env[62503]: self.force_reraise() [ 561.636036] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.636036] env[62503]: raise self.value [ 561.636036] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.636036] env[62503]: updated_port = self._update_port( [ 561.636036] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.636036] env[62503]: _ensure_no_port_binding_failure(port) [ 561.636036] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.636036] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.636824] env[62503]: nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. [ 561.636824] env[62503]: Removing descriptor: 16 [ 561.636824] env[62503]: ERROR nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. 
[ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Traceback (most recent call last): [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] yield resources [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.driver.spawn(context, instance, image_meta, [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.636824] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] vm_ref = self.build_virtual_machine(instance, [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] for vif in network_info: [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self._sync_wrapper(fn, *args, **kwargs) [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.wait() [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self[:] = self._gt.wait() [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self._exit_event.wait() [ 561.637150] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.637491] env[62503]: ERROR 
nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] result = hub.switch() [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self.greenlet.switch() [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] result = function(*args, **kwargs) [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return func(*args, **kwargs) [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise e [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] nwinfo = self.network_api.allocate_for_instance( [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.637491] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] created_port_ids = self._update_ports_for_instance( [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] with excutils.save_and_reraise_exception(): [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.force_reraise() [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise self.value [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] updated_port = self._update_port( [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.637802] 
env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] _ensure_no_port_binding_failure(port) [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.637802] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise exception.PortBindingFailed(port_id=port['id']) [ 561.638105] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. [ 561.638105] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] [ 561.638105] env[62503]: INFO nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Terminating instance [ 561.641749] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.641749] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquired lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.641749] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 561.678174] env[62503]: INFO nova.scheduler.client.report [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Deleted allocations for instance a62798a5-37ba-45be-be56-76e19ce3e189 [ 561.926410] env[62503]: DEBUG nova.compute.manager [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Received event network-vif-deleted-ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 561.926634] env[62503]: DEBUG nova.compute.manager [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Received event network-changed-f85bee01-59b6-479d-9ed7-a1360aedabcf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 561.926789] env[62503]: DEBUG nova.compute.manager [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Refreshing instance network info cache due to event network-changed-f85bee01-59b6-479d-9ed7-a1360aedabcf. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 561.926973] env[62503]: DEBUG oslo_concurrency.lockutils [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] Acquiring lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.933308] env[62503]: DEBUG nova.scheduler.client.report [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 562.189511] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e36023cb-3d1e-4b47-ab32-fcc44cb2b55b tempest-ServerExternalEventsTest-1279836267 tempest-ServerExternalEventsTest-1279836267-project-member] Lock "a62798a5-37ba-45be-be56-76e19ce3e189" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.044s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.199900] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.313299] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Successfully created port: 5e215c2e-3299-4635-90c3-831728c1b765 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.441530] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.862s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.442153] env[62503]: ERROR nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. 
[ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Traceback (most recent call last): [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.driver.spawn(context, instance, image_meta, [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] vm_ref = self.build_virtual_machine(instance, [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.442153] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] for vif in network_info: [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self._sync_wrapper(fn, *args, **kwargs) [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.wait() [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self[:] = self._gt.wait() [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self._exit_event.wait() [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] result = hub.switch() [ 562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
562.442512] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return self.greenlet.switch() [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] result = function(*args, **kwargs) [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] return func(*args, **kwargs) [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise e [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] nwinfo = self.network_api.allocate_for_instance( [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] created_port_ids = self._update_ports_for_instance( [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] with excutils.save_and_reraise_exception(): [ 562.442928] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] self.force_reraise() [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise self.value [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] updated_port = self._update_port( [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] _ensure_no_port_binding_failure(port) [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] raise exception.PortBindingFailed(port_id=port['id']) [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] nova.exception.PortBindingFailed: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. [ 562.443303] env[62503]: ERROR nova.compute.manager [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] [ 562.443612] env[62503]: DEBUG nova.compute.utils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 562.444892] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.431s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.448844] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Build of instance d88c42de-dafe-4bb1-bd56-a770524529f3 was re-scheduled: Binding failed for port fe41d699-e1a6-4055-af31-ce12305bcc1c, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 562.449052] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 562.449355] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquiring lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.449623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Acquired lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.449623] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.607161] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 562.648933] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 562.649235] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 562.649435] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.649697] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 562.649892] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.650039] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 562.650243] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 562.650432] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 562.650701] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 562.650807] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 562.650988] env[62503]: DEBUG nova.virt.hardware [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 562.651915] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f16c0f-9a2a-4abe-9d2a-9b5673e18763 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.667984] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa32d184-d76f-48e8-a76b-a5cdd3ddcdd8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.674713] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.693935] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 563.089366] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.178733] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Releasing lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.179782] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 563.180147] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 563.181286] env[62503]: DEBUG oslo_concurrency.lockutils [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] Acquired lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.182350] env[62503]: DEBUG nova.network.neutron [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Refreshing network info cache for port f85bee01-59b6-479d-9ed7-a1360aedabcf {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 563.185882] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96cc723d-9a59-4f24-a845-452171b05af5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.200729] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301c7d8b-a14c-447c-ae1f-a6fa397ded64 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.229024] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.235915] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24e6abd6-fb6f-49ba-b01b-3977ff205fef could not be found. [ 563.235915] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 563.235915] env[62503]: INFO nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Took 0.06 seconds to destroy the instance on the hypervisor. [ 563.235915] env[62503]: DEBUG oslo.service.loopingcall [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.239669] env[62503]: DEBUG nova.compute.manager [-] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 563.239768] env[62503]: DEBUG nova.network.neutron [-] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 563.243355] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.309051] env[62503]: DEBUG nova.network.neutron [-] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.314922] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6610c78a-bf1a-46e7-867c-24950cbf5701 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.326289] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf98527c-7379-43ac-97e9-80cb55aef1ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.361116] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c9dc6e-e9ef-4e7e-a8af-8faabd1d86b3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.368367] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c56fa7-ae7d-45ae-bace-9b5dec106ffb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.383566] env[62503]: DEBUG nova.compute.provider_tree [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.741874] env[62503]: DEBUG nova.network.neutron [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.749194] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Releasing lock "refresh_cache-d88c42de-dafe-4bb1-bd56-a770524529f3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.749194] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 563.749194] env[62503]: DEBUG nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 563.749194] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 563.816738] env[62503]: DEBUG nova.network.neutron [-] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.888325] env[62503]: DEBUG nova.scheduler.client.report [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 563.900024] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.103216] env[62503]: DEBUG nova.network.neutron [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.321288] env[62503]: INFO nova.compute.manager [-] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Took 1.08 seconds to deallocate network for instance. 
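[editor's aside, not part of the log] The inventory record reported just above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 is what the resource tracker compares against Placement. A minimal Python sketch of how effective capacity would follow from such a record under the usual Placement rule, capacity = (total - reserved) * allocation_ratio; the dict literal and the helper name are assumptions for illustration only, not Nova or Placement API calls:

    # Illustrative sketch only: derive effective capacity from an inventory
    # record shaped like the one logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(record):
        # Assumed rule: usable units = (total - reserved) * allocation_ratio.
        return int((record['total'] - record['reserved']) * record['allocation_ratio'])

    for resource_class, record in inventory.items():
        print(resource_class, effective_capacity(record))
    # -> VCPU 192, MEMORY_MB 196078, DISK_GB 400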
[ 564.325824] env[62503]: DEBUG nova.compute.claims [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 564.325824] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.395154] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.949s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.395154] env[62503]: ERROR nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Traceback (most recent call last): [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.driver.spawn(context, instance, image_meta, [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.395154] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] vm_ref = self.build_virtual_machine(instance, [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] for vif in network_info: [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 
564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self._sync_wrapper(fn, *args, **kwargs) [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.wait() [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self[:] = self._gt.wait() [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self._exit_event.wait() [ 564.395696] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] result = hub.switch() [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return self.greenlet.switch() [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] result = function(*args, **kwargs) [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] return func(*args, **kwargs) [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise e [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] nwinfo = self.network_api.allocate_for_instance( [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.396126] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] created_port_ids = self._update_ports_for_instance( [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.396636] env[62503]: 
ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] with excutils.save_and_reraise_exception(): [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] self.force_reraise() [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise self.value [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] updated_port = self._update_port( [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] _ensure_no_port_binding_failure(port) [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.396636] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] raise exception.PortBindingFailed(port_id=port['id']) [ 564.397234] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] nova.exception.PortBindingFailed: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. [ 564.397234] env[62503]: ERROR nova.compute.manager [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] [ 564.397234] env[62503]: DEBUG nova.compute.utils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 564.403519] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Build of instance b570c2ad-18ac-42ea-bc2e-009992ece3fe was re-scheduled: Binding failed for port 41990c02-c9ae-442d-acc0-e1d2d6642f80, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 564.403805] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 564.403943] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquiring lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.404075] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Acquired lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.404181] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.407585] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.059s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.412498] env[62503]: DEBUG nova.network.neutron [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.607116] env[62503]: DEBUG oslo_concurrency.lockutils [req-3cb6147a-6e58-4d5e-ba6b-7a01955ee1e5 req-a36a4732-9014-4579-b29d-d7ec284891e9 service nova] Releasing lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.793361] env[62503]: ERROR nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. 
[ 564.793361] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 564.793361] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.793361] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.793361] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.793361] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.793361] env[62503]: ERROR nova.compute.manager raise self.value [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.793361] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 564.793361] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.793361] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 564.794103] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.794103] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 564.794103] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. 
[ 564.794103] env[62503]: ERROR nova.compute.manager [ 564.794103] env[62503]: Traceback (most recent call last): [ 564.794103] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 564.794103] env[62503]: listener.cb(fileno) [ 564.794103] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.794103] env[62503]: result = function(*args, **kwargs) [ 564.794103] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 564.794103] env[62503]: return func(*args, **kwargs) [ 564.794103] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 564.794103] env[62503]: raise e [ 564.794103] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 564.794103] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 564.794103] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.794103] env[62503]: created_port_ids = self._update_ports_for_instance( [ 564.794103] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.794103] env[62503]: with excutils.save_and_reraise_exception(): [ 564.794103] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.794103] env[62503]: self.force_reraise() [ 564.794103] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.794103] env[62503]: raise self.value [ 564.794103] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.794103] env[62503]: updated_port = self._update_port( [ 564.794103] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.794103] env[62503]: _ensure_no_port_binding_failure(port) [ 564.794103] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.794103] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 564.794969] env[62503]: nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. [ 564.794969] env[62503]: Removing descriptor: 21 [ 564.794969] env[62503]: ERROR nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. 
[ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Traceback (most recent call last): [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] yield resources [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.driver.spawn(context, instance, image_meta, [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.794969] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] vm_ref = self.build_virtual_machine(instance, [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] for vif in network_info: [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self._sync_wrapper(fn, *args, **kwargs) [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.wait() [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self[:] = self._gt.wait() [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self._exit_event.wait() [ 564.795325] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.795688] env[62503]: ERROR 
nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] result = hub.switch() [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self.greenlet.switch() [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] result = function(*args, **kwargs) [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return func(*args, **kwargs) [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise e [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] nwinfo = self.network_api.allocate_for_instance( [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.795688] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] created_port_ids = self._update_ports_for_instance( [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] with excutils.save_and_reraise_exception(): [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.force_reraise() [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise self.value [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] updated_port = self._update_port( [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.798061] 
env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] _ensure_no_port_binding_failure(port) [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.798061] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise exception.PortBindingFailed(port_id=port['id']) [ 564.798773] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. [ 564.798773] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] [ 564.798773] env[62503]: INFO nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Terminating instance [ 564.798773] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.798773] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquired lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.798773] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.887458] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquiring lock "bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.887458] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.922059] env[62503]: INFO nova.compute.manager [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] [instance: d88c42de-dafe-4bb1-bd56-a770524529f3] Took 1.17 seconds to deallocate network for instance. 
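[editor's aside, not part of the log] Every PortBindingFailed traceback in this run bottoms out in the same place: nova.network.neutron._update_port gets the port back from Neutron, _ensure_no_port_binding_failure sees the failed binding and raises, and the exception is re-raised from the network greenthread into spawn(), after which the compute manager re-schedules the build (instance b570c2ad) or terminates the instance (78599fa1, 9f83ec50 above). A simplified, self-contained sketch of that check follows; it is illustrative only, the exception class is a local stand-in, and the 'binding:vif_type' / 'binding_failed' convention is assumed from Neutron's port-binding attributes rather than quoted from this log:

    # Simplified stand-in for the check named in the tracebacks above
    # (nova/network/neutron.py, _ensure_no_port_binding_failure). Not Nova code.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumed convention: Neutron marks a failed binding by setting the
        # port's 'binding:vif_type' attribute to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port shaped like the one that failed above.
    try:
        ensure_no_port_binding_failure({'id': 'b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR records in this log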
[ 564.959179] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.014607] env[62503]: ERROR nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. [ 565.014607] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 565.014607] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.014607] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.014607] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.014607] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.014607] env[62503]: ERROR nova.compute.manager raise self.value [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.014607] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 565.014607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.014607] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 565.016384] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.016384] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 565.016384] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. 
[ 565.016384] env[62503]: ERROR nova.compute.manager [ 565.016384] env[62503]: Traceback (most recent call last): [ 565.016384] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 565.016384] env[62503]: listener.cb(fileno) [ 565.016384] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.016384] env[62503]: result = function(*args, **kwargs) [ 565.016384] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.016384] env[62503]: return func(*args, **kwargs) [ 565.016384] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 565.016384] env[62503]: raise e [ 565.016384] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 565.016384] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 565.016384] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.016384] env[62503]: created_port_ids = self._update_ports_for_instance( [ 565.016384] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.016384] env[62503]: with excutils.save_and_reraise_exception(): [ 565.016384] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.016384] env[62503]: self.force_reraise() [ 565.016384] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.016384] env[62503]: raise self.value [ 565.016384] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.016384] env[62503]: updated_port = self._update_port( [ 565.016384] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.016384] env[62503]: _ensure_no_port_binding_failure(port) [ 565.016384] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.016384] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 565.019985] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. [ 565.019985] env[62503]: Removing descriptor: 14 [ 565.019985] env[62503]: ERROR nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. 
[ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Traceback (most recent call last): [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] yield resources [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.driver.spawn(context, instance, image_meta, [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.019985] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] vm_ref = self.build_virtual_machine(instance, [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] for vif in network_info: [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self._sync_wrapper(fn, *args, **kwargs) [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.wait() [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self[:] = self._gt.wait() [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self._exit_event.wait() [ 565.020339] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.020685] env[62503]: ERROR 
nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] result = hub.switch() [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self.greenlet.switch() [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] result = function(*args, **kwargs) [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return func(*args, **kwargs) [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise e [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] nwinfo = self.network_api.allocate_for_instance( [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.020685] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] created_port_ids = self._update_ports_for_instance( [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] with excutils.save_and_reraise_exception(): [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.force_reraise() [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise self.value [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] updated_port = self._update_port( [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.021011] 
env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] _ensure_no_port_binding_failure(port) [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.021011] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise exception.PortBindingFailed(port_id=port['id']) [ 565.021309] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. [ 565.021309] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] [ 565.021309] env[62503]: INFO nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Terminating instance [ 565.021309] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquiring lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.021309] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquired lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.021309] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.111054] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.258531] env[62503]: DEBUG nova.compute.manager [req-5eaeb0cb-3cc5-44a1-9f66-5951f6c34f86 req-50647f2e-c869-49fe-ad53-714a70ef5ef8 service nova] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Received event network-vif-deleted-f85bee01-59b6-479d-9ed7-a1360aedabcf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 565.265001] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f52c240-0e9e-4c52-a672-6214d3547adb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.280127] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1eb55ec-7f7b-4ae8-a408-e2ecea9e68d4 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.323073] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482c0621-c6cd-493d-9ee1-79ccb7e0c540 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.334028] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd96e77c-7e17-436d-9534-6d3980d6db25 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.352109] env[62503]: DEBUG nova.compute.provider_tree [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.354280] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.543613] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.561123] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.616673] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Releasing lock "refresh_cache-b570c2ad-18ac-42ea-bc2e-009992ece3fe" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.616673] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 565.616673] env[62503]: DEBUG nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 565.616673] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 565.626510] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.636935] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.858199] env[62503]: DEBUG nova.scheduler.client.report [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 565.965670] env[62503]: INFO nova.scheduler.client.report [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Deleted allocations for instance d88c42de-dafe-4bb1-bd56-a770524529f3 [ 566.069519] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Releasing lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.069881] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 566.070176] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 566.073058] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8fcea06-9ae3-4d41-8e5c-6da1a548b457 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.088316] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bb1eda-5613-49a0-a862-ca838ef06621 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.121942] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 78599fa1-be64-4797-92a9-ebc3a40b59a1 could not be found. [ 566.121942] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 566.122616] env[62503]: INFO nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 566.122928] env[62503]: DEBUG oslo.service.loopingcall [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.123210] env[62503]: DEBUG nova.compute.manager [-] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 566.123302] env[62503]: DEBUG nova.network.neutron [-] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.130776] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Releasing lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.131188] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 566.131774] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 566.131774] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0420f1e7-fab3-4386-85ad-7fb99f107c5e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.140929] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fd24b4-431f-4093-883a-b9f77fe21705 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.151630] env[62503]: DEBUG nova.network.neutron [-] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.153591] env[62503]: DEBUG nova.network.neutron [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.171201] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9f83ec50-5143-45e1-849a-5c441d2702e2 could not be found. [ 566.171201] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 566.171336] env[62503]: INFO nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 566.172358] env[62503]: DEBUG oslo.service.loopingcall [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.173477] env[62503]: DEBUG nova.compute.manager [-] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 566.173477] env[62503]: DEBUG nova.network.neutron [-] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.204772] env[62503]: DEBUG nova.network.neutron [-] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.316057] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.316057] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.366204] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.366885] env[62503]: ERROR nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. 
[ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] Traceback (most recent call last): [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.driver.spawn(context, instance, image_meta, [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] vm_ref = self.build_virtual_machine(instance, [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.366885] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] for vif in network_info: [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self._sync_wrapper(fn, *args, **kwargs) [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.wait() [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self[:] = self._gt.wait() [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self._exit_event.wait() [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] result = hub.switch() [ 566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
566.367258] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return self.greenlet.switch() [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] result = function(*args, **kwargs) [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] return func(*args, **kwargs) [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise e [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] nwinfo = self.network_api.allocate_for_instance( [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] created_port_ids = self._update_ports_for_instance( [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] with excutils.save_and_reraise_exception(): [ 566.367634] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] self.force_reraise() [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise self.value [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] updated_port = self._update_port( [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] _ensure_no_port_binding_failure(port) [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] raise exception.PortBindingFailed(port_id=port['id']) [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] nova.exception.PortBindingFailed: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. [ 566.367975] env[62503]: ERROR nova.compute.manager [instance: f8142528-e04c-444a-a252-84e98cecee74] [ 566.368244] env[62503]: DEBUG nova.compute.utils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 566.368919] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.604s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.371353] env[62503]: INFO nova.compute.claims [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.377265] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Build of instance f8142528-e04c-444a-a252-84e98cecee74 was re-scheduled: Binding failed for port bda1f3f8-ef2f-4b97-9bce-d4110f60c4eb, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 566.377781] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 566.378059] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.378745] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquired lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.378745] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.383986] env[62503]: DEBUG nova.compute.manager [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Received event network-changed-5e215c2e-3299-4635-90c3-831728c1b765 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 566.383986] env[62503]: DEBUG nova.compute.manager [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Refreshing instance network info cache due to event network-changed-5e215c2e-3299-4635-90c3-831728c1b765. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 566.383986] env[62503]: DEBUG oslo_concurrency.lockutils [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] Acquiring lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.383986] env[62503]: DEBUG oslo_concurrency.lockutils [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] Acquired lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.383986] env[62503]: DEBUG nova.network.neutron [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Refreshing network info cache for port 5e215c2e-3299-4635-90c3-831728c1b765 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 566.474535] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fe1e164b-4aab-4244-8a41-f3565d333c9b tempest-ServerDiagnosticsTest-976195064 tempest-ServerDiagnosticsTest-976195064-project-member] Lock "d88c42de-dafe-4bb1-bd56-a770524529f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.753s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.654781] env[62503]: DEBUG nova.network.neutron [-] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.657039] env[62503]: INFO nova.compute.manager [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] [instance: b570c2ad-18ac-42ea-bc2e-009992ece3fe] Took 1.04 seconds to deallocate network for instance. [ 566.707382] env[62503]: DEBUG nova.network.neutron [-] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.909825] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.922528] env[62503]: DEBUG nova.network.neutron [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.979231] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 567.003178] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.067810] env[62503]: DEBUG nova.network.neutron [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.160952] env[62503]: INFO nova.compute.manager [-] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Took 1.04 seconds to deallocate network for instance. [ 567.165527] env[62503]: DEBUG nova.compute.claims [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 567.165699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.211185] env[62503]: INFO nova.compute.manager [-] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Took 1.04 seconds to deallocate network for instance. [ 567.214433] env[62503]: DEBUG nova.compute.claims [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 567.214698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.509109] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Releasing lock "refresh_cache-f8142528-e04c-444a-a252-84e98cecee74" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.510510] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 567.510510] env[62503]: DEBUG nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 567.510510] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.514010] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.540482] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.572610] env[62503]: DEBUG oslo_concurrency.lockutils [req-90a03e23-c042-4864-bfa2-a965c0281a59 req-db51c312-d1d4-406f-a6b1-334b32deebc0 service nova] Releasing lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.698686] env[62503]: INFO nova.scheduler.client.report [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Deleted allocations for instance b570c2ad-18ac-42ea-bc2e-009992ece3fe [ 567.730287] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc59da8-fa15-4837-8191-c3b5ca959c2e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.740674] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc7293f-2f5c-433b-86b9-f343be5ab57c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.771951] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7db666-fdec-4569-9a78-9d57982f8f3e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.780123] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e01d4e-2548-48c2-9332-6e504afbd890 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.794597] env[62503]: DEBUG nova.compute.provider_tree [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Inventory has not changed in ProviderTree 
for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.001512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquiring lock "1a27278b-b930-4432-90f2-45cdf025c83e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.001512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "1a27278b-b930-4432-90f2-45cdf025c83e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.045202] env[62503]: DEBUG nova.network.neutron [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.208647] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6237cd83-53e5-445e-991f-0fa75149e2aa tempest-TenantUsagesTestJSON-2099757901 tempest-TenantUsagesTestJSON-2099757901-project-member] Lock "b570c2ad-18ac-42ea-bc2e-009992ece3fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.384s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.301854] env[62503]: DEBUG nova.scheduler.client.report [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 568.550486] env[62503]: INFO nova.compute.manager [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: f8142528-e04c-444a-a252-84e98cecee74] Took 1.04 seconds to deallocate network for instance. 
[ 568.714501] env[62503]: DEBUG nova.compute.manager [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Received event network-changed-b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 568.714701] env[62503]: DEBUG nova.compute.manager [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Refreshing instance network info cache due to event network-changed-b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 568.714916] env[62503]: DEBUG oslo_concurrency.lockutils [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] Acquiring lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.715354] env[62503]: DEBUG oslo_concurrency.lockutils [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] Acquired lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.715843] env[62503]: DEBUG nova.network.neutron [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Refreshing network info cache for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.719068] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 568.814247] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.814633] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 568.818100] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.416s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.192151] env[62503]: DEBUG nova.compute.manager [None req-d6152b3e-00ff-4415-a372-c30d5fa1a402 tempest-ServerDiagnosticsV248Test-922741278 tempest-ServerDiagnosticsV248Test-922741278-project-admin] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 569.193819] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a5159b-9cd3-4a8f-abba-a5f39dc17e61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.205123] env[62503]: INFO nova.compute.manager [None req-d6152b3e-00ff-4415-a372-c30d5fa1a402 tempest-ServerDiagnosticsV248Test-922741278 tempest-ServerDiagnosticsV248Test-922741278-project-admin] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Retrieving diagnostics [ 569.205123] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982b5ffc-e5a3-4c21-8aa9-8865495b46a2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.247045] env[62503]: DEBUG nova.compute.manager [req-7991ef42-94c2-4c66-b32f-f08fbc1ca1c0 req-94cc7c11-02bf-4ceb-bd18-b1ee4b00ec31 service nova] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Received event network-vif-deleted-5e215c2e-3299-4635-90c3-831728c1b765 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 569.268765] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.297911] env[62503]: DEBUG nova.network.neutron [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.321600] env[62503]: DEBUG nova.compute.utils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.328112] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Not allocating networking since 'none' was specified. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 569.468641] env[62503]: DEBUG nova.network.neutron [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.648384] env[62503]: INFO nova.scheduler.client.report [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Deleted allocations for instance f8142528-e04c-444a-a252-84e98cecee74 [ 569.829508] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 569.867475] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 90e42997-a34c-4a39-8d2f-7ab0ed19f028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.867646] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0b1ead99-af3b-41a6-8354-bc451a51133c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.867724] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 18c503dc-6283-4489-a69c-8dead1ec3a0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.867845] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 24e6abd6-fb6f-49ba-b01b-3977ff205fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.868084] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 78599fa1-be64-4797-92a9-ebc3a40b59a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.868084] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 9f83ec50-5143-45e1-849a-5c441d2702e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.868706] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c4a88e75-690f-4bed-a4f9-a0de3b193eff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 569.971892] env[62503]: DEBUG oslo_concurrency.lockutils [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] Releasing lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.971892] env[62503]: DEBUG nova.compute.manager [req-bfa03fcb-377e-4d0f-899c-ace79cf7abac req-8ed865c0-62db-44a5-a7a5-9ad76d3dec5d service nova] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Received event network-vif-deleted-b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 570.156342] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b7e47845-e371-42bf-a29f-c41620bf02ac tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "f8142528-e04c-444a-a252-84e98cecee74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.365s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.371263] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 59fd4a4a-20f5-4b8f-970a-acfc882f45a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.660704] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 570.745869] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "30befad4-aacb-44d5-87ed-4fc6b0e34bd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.746164] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "30befad4-aacb-44d5-87ed-4fc6b0e34bd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.850518] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 570.877616] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 7a0b2744-2bb0-4eee-9861-418ba67b719c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.893385] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.893385] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.893385] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.893385] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.893640] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.893640] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.893640] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.893735] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.894759] env[62503]: DEBUG nova.virt.hardware [None 
req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.894759] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.894759] env[62503]: DEBUG nova.virt.hardware [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.895554] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aca6665-699b-43f5-9a59-0153a3986c69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.906187] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc1ccac-455b-4cef-8f09-298043031b67 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.921539] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.928106] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating folder: Project (f52debfdd41f41fa8d7f7d346022d9ac). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.928106] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4943613-f593-4092-baff-90b349d156d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.938108] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Created folder: Project (f52debfdd41f41fa8d7f7d346022d9ac) in parent group-v294540. [ 570.938189] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating folder: Instances. Parent ref: group-v294548. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.938425] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ff394af-23a2-4483-b72b-46f89e73b245 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.948294] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Created folder: Instances in parent group-v294548. [ 570.948836] env[62503]: DEBUG oslo.service.loopingcall [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.950022] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 570.950022] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-710ab4a2-b42c-44c3-805e-e4c7a2184038 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.971803] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.971803] env[62503]: value = "task-1387678" [ 570.971803] env[62503]: _type = "Task" [ 570.971803] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.981361] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387678, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.199510] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.386271] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance d8d4c087-9b0f-48c7-bd05-291a7e2a3e83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 571.481825] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387678, 'name': CreateVM_Task, 'duration_secs': 0.305896} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.481997] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 571.482884] env[62503]: DEBUG oslo_vmware.service [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff250e7-063f-45e0-9383-7e903c1d159b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.489616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.489616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.489616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.489811] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a5d2a42-1d25-48c9-9739-88db8333cb93 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.494308] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 571.494308] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521017e2-40e7-ef6a-e8ca-d69017b263a6" [ 571.494308] env[62503]: _type = "Task" [ 571.494308] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.502788] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521017e2-40e7-ef6a-e8ca-d69017b263a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.590562] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.590814] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.591040] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.591988] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.592328] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.594955] env[62503]: INFO nova.compute.manager [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Terminating instance [ 571.598167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "refresh_cache-90e42997-a34c-4a39-8d2f-7ab0ed19f028" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.598167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquired lock "refresh_cache-90e42997-a34c-4a39-8d2f-7ab0ed19f028" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.598167] env[62503]: DEBUG nova.network.neutron [None 
req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.891778] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 24b4c233-c874-452e-a7fc-492ca2a49a09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.010890] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.011514] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.011514] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.011514] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.011680] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.011814] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ca4d12e-0f94-41ea-a858-4493bfaf18a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.033626] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.033626] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 
tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.036095] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f2836a-7b93-421d-aaf9-0bbb3e29f197 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.045164] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-755381ed-df58-4957-b490-ac2c9875bd7f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.053221] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 572.053221] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520d26e0-6981-0f3d-26fb-d9b9002cead2" [ 572.053221] env[62503]: _type = "Task" [ 572.053221] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.068667] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520d26e0-6981-0f3d-26fb-d9b9002cead2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.210237] env[62503]: DEBUG nova.network.neutron [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.395437] env[62503]: DEBUG nova.network.neutron [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.397045] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 20bf8c62-8b80-45c2-98d4-5a960f465aa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.569224] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Preparing fetch location {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 572.569343] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating directory with path [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.569540] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8375346b-5465-431e-9dfc-ce2e353a7334 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.598156] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Created directory with path [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.598156] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Fetch image to [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 572.598156] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Downloading image file data 8150ca02-f879-471d-8913-459408f127a1 to [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62503) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 572.598600] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1edae6-c600-49a0-bb53-cf532f215b2b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.611697] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116deac0-8bc2-4323-aa8d-ad803c5ee8d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.622293] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae9d24a-ea3c-4573-8b0f-5e1b33230aff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.658541] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ace267-88dd-40ee-a508-37a325e0c548 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.664790] env[62503]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6dacc692-863c-4e09-b060-10483d36faa9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.689142] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Downloading image file data 8150ca02-f879-471d-8913-459408f127a1 to the data store datastore2 {{(pid=62503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 572.775214] env[62503]: DEBUG oslo_vmware.rw_handles [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 572.900667] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Releasing lock "refresh_cache-90e42997-a34c-4a39-8d2f-7ab0ed19f028" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.900667] env[62503]: DEBUG nova.compute.manager [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 572.900667] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.900977] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance bef7d4e7-9af2-4071-ae6d-bdbfa7f46460 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.902816] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2d615e-de37-46a5-ac80-1f884891bf64 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.919626] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 572.922016] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6faa257-8b25-4423-8977-ffce98b6e8ad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.927039] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 572.927039] env[62503]: value = "task-1387680" [ 572.927039] env[62503]: _type = "Task" [ 572.927039] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.940067] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387680, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.412967] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance eed1dcc3-d9f9-4211-a4c3-850dcdad72b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 573.443995] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387680, 'name': PowerOffVM_Task, 'duration_secs': 0.141176} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.444583] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 573.444793] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 573.445090] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-837599f3-3ae3-4266-9048-b8a9cbb779eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.474248] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 573.474248] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 573.474488] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleting the datastore file [datastore1] 90e42997-a34c-4a39-8d2f-7ab0ed19f028 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 573.474671] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2394c168-1143-481f-b3e9-d70dcb192da5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.484541] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for the task: (returnval){ [ 573.484541] env[62503]: value = "task-1387682" [ 573.484541] env[62503]: _type = "Task" [ 573.484541] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.492825] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387682, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.574303] env[62503]: DEBUG oslo_vmware.rw_handles [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Completed reading data from the image iterator. {{(pid=62503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 573.574447] env[62503]: DEBUG oslo_vmware.rw_handles [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 573.633152] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Downloaded image file data 8150ca02-f879-471d-8913-459408f127a1 to vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62503) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 573.633152] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Caching image {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 573.633859] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk to [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.634309] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e20f4aca-324d-4db1-ad13-e5e80c2c4d99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.642371] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 573.642371] env[62503]: value = "task-1387684" [ 573.642371] env[62503]: _type = "Task" [ 573.642371] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.654851] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387684, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.916591] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 1a27278b-b930-4432-90f2-45cdf025c83e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 573.916861] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 573.917545] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 573.998992] env[62503]: DEBUG oslo_vmware.api [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Task: {'id': task-1387682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101872} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.003695] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 574.004115] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 574.005964] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 574.005964] env[62503]: INFO nova.compute.manager [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Took 1.10 seconds to destroy the instance on the hypervisor. [ 574.005964] env[62503]: DEBUG oslo.service.loopingcall [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.005964] env[62503]: DEBUG nova.compute.manager [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 574.006514] env[62503]: DEBUG nova.network.neutron [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 574.061687] env[62503]: DEBUG nova.network.neutron [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.155456] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387684, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.209653] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a0de16-1a3d-4e85-8076-75d1c078abfb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.218166] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2dd817e-b23b-4c0e-a11d-a88e491d23d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.257780] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0eedea-0671-48d4-8454-36e57310ea72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.265577] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a33d7c-9c72-428c-a6ef-857eda82bcff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.280151] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.386080] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquiring lock "a8d8e232-6096-4da3-8f2c-65a5e5f713ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.386408] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "a8d8e232-6096-4da3-8f2c-65a5e5f713ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.569030] env[62503]: DEBUG nova.network.neutron [-] 
[instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.655561] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773797} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.655952] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copied Virtual Disk [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk to [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.656354] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleting the datastore file [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1/tmp-sparse.vmdk {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 574.656742] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1897c12a-e81c-432c-b249-b07dc459f8a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.664347] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 574.664347] env[62503]: value = "task-1387685" [ 574.664347] env[62503]: _type = "Task" [ 574.664347] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.673410] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387685, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.788071] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 575.077063] env[62503]: INFO nova.compute.manager [-] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Took 1.07 seconds to deallocate network for instance. 
[ 575.159321] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "73142a3f-3be8-4956-90f8-6ca223d2d01f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.159831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "73142a3f-3be8-4956-90f8-6ca223d2d01f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.186708] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023105} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.187678] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 575.188040] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Moving file from [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd/8150ca02-f879-471d-8913-459408f127a1 to [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1. {{(pid=62503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 575.188139] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-1d364c41-7270-474c-9c48-e26646fc56fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.206451] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 575.206451] env[62503]: value = "task-1387686" [ 575.206451] env[62503]: _type = "Task" [ 575.206451] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.220355] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387686, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.294089] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 575.294340] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.477s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.294630] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.475s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.589600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.718289] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquiring lock "5a1af72f-71c8-42de-aa71-f011d85210a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.718581] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "5a1af72f-71c8-42de-aa71-f011d85210a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.725662] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387686, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.032631} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.725662] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] File moved {{(pid=62503) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 575.725662] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Cleaning up location [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 575.725662] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleting the datastore file [datastore2] vmware_temp/0a7f914b-b9fe-40dc-b1fa-331bd7a515dd {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.725662] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe756878-c434-40e9-965b-9812fcfac05c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.732370] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 575.732370] env[62503]: value = "task-1387688" [ 575.732370] env[62503]: _type = "Task" [ 575.732370] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.741733] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.137078] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3896d14e-10e0-4dca-a506-e0ec19a57763 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.146322] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd54843-8bf6-408e-8ed4-0bfe09d570be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.188694] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12537b9-7d7d-4bb2-a7e9-13612e79f526 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.200013] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db79fa59-ffaf-4fd8-9488-f69feb314ff8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.215027] env[62503]: DEBUG nova.compute.provider_tree [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.245319] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026943} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.245631] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 576.246322] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8bad543-6d00-4e6c-875e-9df772451c76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.251954] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 576.251954] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282c9ab-5049-e6e1-d3df-6ad73bad7d3c" [ 576.251954] env[62503]: _type = "Task" [ 576.251954] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.260341] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282c9ab-5049-e6e1-d3df-6ad73bad7d3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.718073] env[62503]: DEBUG nova.scheduler.client.report [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 576.764301] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282c9ab-5049-e6e1-d3df-6ad73bad7d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.011379} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.768695] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.768695] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 576.768695] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d49ea381-5865-41c5-aff4-916a238775ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.775661] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 576.775661] env[62503]: value = "task-1387689" [ 576.775661] env[62503]: _type = "Task" [ 576.775661] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.787382] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.231500] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.935s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.231500] env[62503]: ERROR nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Traceback (most recent call last): [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.driver.spawn(context, instance, image_meta, [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.231500] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] vm_ref = self.build_virtual_machine(instance, [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] for vif in network_info: [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self._sync_wrapper(fn, *args, **kwargs) [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.wait() [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/model.py", line 637, in 
wait [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self[:] = self._gt.wait() [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self._exit_event.wait() [ 577.231825] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] result = hub.switch() [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return self.greenlet.switch() [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] result = function(*args, **kwargs) [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] return func(*args, **kwargs) [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise e [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] nwinfo = self.network_api.allocate_for_instance( [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.232200] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] created_port_ids = self._update_ports_for_instance( [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] with excutils.save_and_reraise_exception(): [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] self.force_reraise() [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise self.value [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] updated_port = self._update_port( [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] _ensure_no_port_binding_failure(port) [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.233545] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] raise exception.PortBindingFailed(port_id=port['id']) [ 577.233920] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] nova.exception.PortBindingFailed: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. [ 577.233920] env[62503]: ERROR nova.compute.manager [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] [ 577.233920] env[62503]: DEBUG nova.compute.utils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 577.238165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.872s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.240800] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Build of instance 0b1ead99-af3b-41a6-8354-bc451a51133c was re-scheduled: Binding failed for port 85548955-452a-4861-a7b7-7b7a736f42d3, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 577.244560] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 577.244560] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquiring lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.244560] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Acquired lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.244560] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 577.289583] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494947} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.290035] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 577.290429] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.290875] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcc5a3bb-0a7b-4262-bf15-41eb020875d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.299100] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 577.299100] env[62503]: value = "task-1387690" [ 577.299100] env[62503]: _type = "Task" [ 577.299100] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.316483] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387690, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.769729] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.809832] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387690, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079528} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.810219] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.811060] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb2000-c036-4611-83f2-f82eb604ae69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.842302] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.845362] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d30407f-37d7-40d5-a13c-c4f1ff631159 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.868883] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 577.868883] env[62503]: value = "task-1387691" [ 577.868883] env[62503]: _type = "Task" [ 577.868883] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.877377] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387691, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.955642] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.132332] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d47aa41-0e34-4ef1-a1aa-ec6d98307af3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.140160] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7141bc37-afb5-4986-9486-12c28ddf5661 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.179660] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f213ca-c226-4af1-a33e-86418f68e7b4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.188425] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34de0005-d01a-48e8-974d-61822247f466 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.203198] env[62503]: DEBUG nova.compute.provider_tree [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.383923] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387691, 'name': ReconfigVM_Task, 'duration_secs': 0.267855} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.388023] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfigured VM instance instance-0000000b to attach disk [datastore2] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.388023] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6431461-cd73-4320-8bcb-b07249993d7f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.394896] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 578.394896] env[62503]: value = "task-1387692" [ 578.394896] env[62503]: _type = "Task" [ 578.394896] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.408171] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387692, 'name': Rename_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.459082] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Releasing lock "refresh_cache-0b1ead99-af3b-41a6-8354-bc451a51133c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.459373] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 578.459576] env[62503]: DEBUG nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 578.459777] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 578.485880] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.543228] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.543228] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.708751] env[62503]: DEBUG nova.scheduler.client.report [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 578.861279] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.861279] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.905389] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387692, 'name': Rename_Task, 'duration_secs': 0.129335} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.905675] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 578.906470] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22e2e672-075a-4af9-bb29-b49193a89399 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.912574] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 578.912574] env[62503]: value = "task-1387693" [ 578.912574] env[62503]: _type = "Task" [ 578.912574] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.919719] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.987956] env[62503]: DEBUG nova.network.neutron [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.216895] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.981s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.217560] env[62503]: ERROR nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. 
[ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Traceback (most recent call last): [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.driver.spawn(context, instance, image_meta, [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] vm_ref = self.build_virtual_machine(instance, [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] vif_infos = vmwarevif.get_vif_info(self._session, [ 579.217560] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] for vif in network_info: [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self._sync_wrapper(fn, *args, **kwargs) [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.wait() [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self[:] = self._gt.wait() [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self._exit_event.wait() [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] result = hub.switch() [ 579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
579.218012] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return self.greenlet.switch() [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] result = function(*args, **kwargs) [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] return func(*args, **kwargs) [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise e [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] nwinfo = self.network_api.allocate_for_instance( [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] created_port_ids = self._update_ports_for_instance( [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] with excutils.save_and_reraise_exception(): [ 579.218523] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] self.force_reraise() [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise self.value [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] updated_port = self._update_port( [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] _ensure_no_port_binding_failure(port) [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] raise exception.PortBindingFailed(port_id=port['id']) [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] nova.exception.PortBindingFailed: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. [ 579.219178] env[62503]: ERROR nova.compute.manager [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] [ 579.219838] env[62503]: DEBUG nova.compute.utils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 579.219838] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.976s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.221143] env[62503]: INFO nova.compute.claims [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.224973] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Build of instance 18c503dc-6283-4489-a69c-8dead1ec3a0d was re-scheduled: Binding failed for port ddb1371b-89f1-40c0-8e4a-2fffdc1f62eb, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 579.225533] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 579.225771] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.225917] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquired lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.226085] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.426677] env[62503]: DEBUG oslo_vmware.api [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387693, 'name': PowerOnVM_Task, 'duration_secs': 0.444118} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.427054] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 579.427186] env[62503]: INFO nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Took 8.58 seconds to spawn the instance on the hypervisor. [ 579.427379] env[62503]: DEBUG nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 579.428941] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef124d64-8c4c-4853-9cad-827ead211416 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.492171] env[62503]: INFO nova.compute.manager [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] [instance: 0b1ead99-af3b-41a6-8354-bc451a51133c] Took 1.03 seconds to deallocate network for instance. 
[ 579.771034] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.848780] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "c32f170d-1e88-4716-a02a-b8db6896e900" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.849057] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "c32f170d-1e88-4716-a02a-b8db6896e900" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.887550] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.956122] env[62503]: INFO nova.compute.manager [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Took 24.22 seconds to build instance. 
[ 579.982121] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquiring lock "e49252e3-11cc-49c3-b959-24ad87ad48c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.982608] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "e49252e3-11cc-49c3-b959-24ad87ad48c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.393023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Releasing lock "refresh_cache-18c503dc-6283-4489-a69c-8dead1ec3a0d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.393329] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 580.393500] env[62503]: DEBUG nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 580.393666] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 580.453272] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.458660] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5be7067b-f1af-42fc-acec-92c0c71cc7a8 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.554s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.538682] env[62503]: INFO nova.scheduler.client.report [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Deleted allocations for instance 0b1ead99-af3b-41a6-8354-bc451a51133c [ 580.644525] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bd5621-58b6-40cf-9c4f-4eb6f94f3c99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.655838] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abd9492-0d54-4680-b45c-c9b197c552e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.696219] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff0024c-e096-475c-9e9d-2887f278a049 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.704429] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84504db-5252-4a45-b6c1-f00cef72ff3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.719284] env[62503]: DEBUG nova.compute.provider_tree [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.899412] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquiring lock "5ca1a33c-7324-481c-95cd-3761ce8ccf13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.899412] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "5ca1a33c-7324-481c-95cd-3761ce8ccf13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.960287] env[62503]: DEBUG nova.network.neutron [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Updating 
instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.961882] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 581.048213] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2175fec1-420b-4607-b7a6-fef627f43b34 tempest-ImagesOneServerNegativeTestJSON-1494729482 tempest-ImagesOneServerNegativeTestJSON-1494729482-project-member] Lock "0b1ead99-af3b-41a6-8354-bc451a51133c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.568s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.225148] env[62503]: DEBUG nova.scheduler.client.report [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 581.460801] env[62503]: INFO nova.compute.manager [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 18c503dc-6283-4489-a69c-8dead1ec3a0d] Took 1.07 seconds to deallocate network for instance. [ 581.493262] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.554438] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 581.586672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "2aa7880f-de24-4f32-b027-731a2030f987" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.587326] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "2aa7880f-de24-4f32-b027-731a2030f987" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.730030] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.730546] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 581.734393] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.409s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.099417] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.103292] env[62503]: INFO nova.compute.manager [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Rebuilding instance [ 582.171520] env[62503]: DEBUG nova.compute.manager [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 582.172457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17db9a1-2bce-47b3-9695-2bd00e41dafa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
582.243609] env[62503]: DEBUG nova.compute.utils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.251694] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 582.251903] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.488247] env[62503]: DEBUG nova.policy [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16287e7f4114e1b9278fa463f7a1c3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c65ccbbe7774e7994830d31cdbb91ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 582.505730] env[62503]: INFO nova.scheduler.client.report [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Deleted allocations for instance 18c503dc-6283-4489-a69c-8dead1ec3a0d [ 582.683420] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a09296-b17f-4827-86cb-d0e6d3e47748 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.697706] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdca4c35-c772-4f6a-b554-54930b9ed5e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.735560] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efed387b-76ca-4c88-810d-2e432ad86afa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.745226] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75809db-ad4c-45a0-9b41-29ac4d27b312 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.762225] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 582.765562] env[62503]: DEBUG nova.compute.provider_tree [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.018712] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58237dd7-a314-47a2-91b0-62504c9b4c3a tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "18c503dc-6283-4489-a69c-8dead1ec3a0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.967s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.199963] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 583.199963] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5db124e-d041-484a-89aa-9d6a82423eea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.207317] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 583.207317] env[62503]: value = "task-1387694" [ 583.207317] env[62503]: _type = "Task" [ 583.207317] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.219236] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.273737] env[62503]: DEBUG nova.scheduler.client.report [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 583.523312] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 583.722537] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387694, 'name': PowerOffVM_Task, 'duration_secs': 0.14445} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.724235] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 583.724235] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 583.724235] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecd56cc-5b21-4900-845d-16095a7cafc9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.731831] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 583.732048] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51ff5539-1360-4a1f-968a-b54d143834d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.758052] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 583.758052] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 583.758052] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleting the datastore file [datastore2] c4a88e75-690f-4bed-a4f9-a0de3b193eff {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 583.758052] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71cc1556-f7ef-4eb2-b133-fc7dae7c376c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.766658] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a 
tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 583.766658] env[62503]: value = "task-1387696" [ 583.766658] env[62503]: _type = "Task" [ 583.766658] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.775676] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.783551] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 583.786363] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.788563] env[62503]: ERROR nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. 
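The PortBindingFailed error above, and the traceback that follows it, come from Nova's Neutron port update path: after Neutron returns the updated port, nova/network/neutron.py calls _ensure_no_port_binding_failure(port) and raises if the binding did not succeed. A minimal sketch of that check, consistent with the function and exception named in the traceback (the exact field and value tested are assumptions here, not taken from this log):

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # Neutron flags a failed binding on the port itself; the field name and
        # sentinel value below are assumed for illustration.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

In this run the port came back with a failed binding, so the spawn is aborted, the resource claim is released, and the build is re-scheduled, as the surrounding entries show.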
[ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Traceback (most recent call last): [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.driver.spawn(context, instance, image_meta, [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] vm_ref = self.build_virtual_machine(instance, [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.788563] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] for vif in network_info: [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self._sync_wrapper(fn, *args, **kwargs) [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.wait() [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self[:] = self._gt.wait() [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self._exit_event.wait() [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] result = hub.switch() [ 583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
583.788997] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return self.greenlet.switch() [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] result = function(*args, **kwargs) [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] return func(*args, **kwargs) [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise e [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] nwinfo = self.network_api.allocate_for_instance( [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] created_port_ids = self._update_ports_for_instance( [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] with excutils.save_and_reraise_exception(): [ 583.789400] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] self.force_reraise() [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise self.value [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] updated_port = self._update_port( [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] _ensure_no_port_binding_failure(port) [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] raise exception.PortBindingFailed(port_id=port['id']) [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] nova.exception.PortBindingFailed: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. [ 583.790007] env[62503]: ERROR nova.compute.manager [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] [ 583.791658] env[62503]: DEBUG nova.compute.utils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 583.792966] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.627s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.796832] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Build of instance 24e6abd6-fb6f-49ba-b01b-3977ff205fef was re-scheduled: Binding failed for port f85bee01-59b6-479d-9ed7-a1360aedabcf, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 583.796832] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 583.797394] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.797394] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquired lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.797536] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.842692] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.842888] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.842930] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.843117] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 583.843275] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.843439] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.843737] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.843910] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.844252] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.844252] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.844379] env[62503]: DEBUG nova.virt.hardware [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.845284] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e267eda9-b7b3-48e6-84e8-036d64d37653 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.857525] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3738cf-a169-42b7-b534-888003a5b5df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.000411] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Successfully created port: e524eb4d-de41-4da6-9bfa-395d033d0529 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.062519] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 
tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.278420] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103527} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.278420] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 584.278420] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 584.278420] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 584.325042] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.484094] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.674281] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3916f495-b81e-431b-b1a2-b5c6d2512991 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.685287] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab80f0fc-6509-4892-b082-a52469c87003 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.723219] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29db9ba-275f-425c-997b-7c66b8747909 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.732341] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e89682e-0075-4050-98dc-3c5cbdba133a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.745987] env[62503]: DEBUG nova.compute.provider_tree [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.985653] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Releasing lock "refresh_cache-24e6abd6-fb6f-49ba-b01b-3977ff205fef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.985841] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 584.986054] env[62503]: DEBUG nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 584.986232] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.018448] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.182731] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "0af1e65d-ca88-475e-a871-4087bd49cd9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.182978] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "0af1e65d-ca88-475e-a871-4087bd49cd9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.248736] env[62503]: DEBUG nova.scheduler.client.report [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 585.320766] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 585.324135] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 585.324135] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.324135] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 585.324135] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.324135] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 585.325049] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 585.325049] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 585.325049] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 585.325049] env[62503]: DEBUG nova.virt.hardware [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 585.325049] env[62503]: DEBUG nova.virt.hardware [None 
req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.325359] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beff7de0-9270-4aaa-a955-0fcb10bed522 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.333980] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825043f0-cff7-4bbd-a328-012c2498a7e7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.353911] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.360945] env[62503]: DEBUG oslo.service.loopingcall [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.361856] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 585.361856] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83d38777-f7a2-4119-bc01-4593a7117683 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.380174] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.380174] env[62503]: value = "task-1387697" [ 585.380174] env[62503]: _type = "Task" [ 585.380174] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.388660] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387697, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.521460] env[62503]: DEBUG nova.network.neutron [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.757477] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.961s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.757477] env[62503]: ERROR nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Traceback (most recent call last): [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.driver.spawn(context, instance, image_meta, [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.757477] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] vm_ref = self.build_virtual_machine(instance, [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] for vif in network_info: [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self._sync_wrapper(fn, *args, **kwargs) [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.wait() [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self[:] = self._gt.wait() [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self._exit_event.wait() [ 585.757749] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] result = hub.switch() [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return self.greenlet.switch() [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] result = function(*args, **kwargs) [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] return func(*args, **kwargs) [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise e [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] nwinfo = self.network_api.allocate_for_instance( [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.758145] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] created_port_ids = self._update_ports_for_instance( [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] with excutils.save_and_reraise_exception(): [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] self.force_reraise() [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise self.value [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] updated_port = self._update_port( [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] _ensure_no_port_binding_failure(port) [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.758522] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] raise exception.PortBindingFailed(port_id=port['id']) [ 585.758906] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] nova.exception.PortBindingFailed: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. [ 585.758906] env[62503]: ERROR nova.compute.manager [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] [ 585.758906] env[62503]: DEBUG nova.compute.utils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 585.758906] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.542s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.763027] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Build of instance 78599fa1-be64-4797-92a9-ebc3a40b59a1 was re-scheduled: Binding failed for port b4bbbe8d-e2ef-4d38-b0c2-6afa9ec6fcf8, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 585.763027] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 585.763793] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquiring lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.764102] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Acquired lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.764373] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.893053] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387697, 'name': CreateVM_Task, 'duration_secs': 0.29299} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.893236] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 585.893657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.893851] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.894243] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 585.894526] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac7282da-1807-4b8b-b5df-e56d2722d1de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.899897] env[62503]: DEBUG 
oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 585.899897] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b074-d344-58e9-2a91-77b428b1bb89" [ 585.899897] env[62503]: _type = "Task" [ 585.899897] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.909423] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b074-d344-58e9-2a91-77b428b1bb89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.025031] env[62503]: INFO nova.compute.manager [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 24e6abd6-fb6f-49ba-b01b-3977ff205fef] Took 1.04 seconds to deallocate network for instance. [ 586.147436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquiring lock "d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.147436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.294159] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.412290] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b074-d344-58e9-2a91-77b428b1bb89, 'name': SearchDatastore_Task, 'duration_secs': 0.008492} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.412938] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.413305] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.413656] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.413924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.414220] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.414634] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1348aee6-a387-4c5d-a249-083661b5e457 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.426716] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.427179] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.428303] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28dd7cc7-789e-44d6-abaa-dca5dab1bfa4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.434039] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 586.434039] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ba03a9-189a-7267-5e10-f207d0798f7d" [ 586.434039] env[62503]: _type = "Task" [ 586.434039] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.445382] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ba03a9-189a-7267-5e10-f207d0798f7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.472278] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.667092] env[62503]: DEBUG nova.compute.manager [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Received event network-changed-e524eb4d-de41-4da6-9bfa-395d033d0529 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 586.667355] env[62503]: DEBUG nova.compute.manager [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Refreshing instance network info cache due to event network-changed-e524eb4d-de41-4da6-9bfa-395d033d0529. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 586.667861] env[62503]: DEBUG oslo_concurrency.lockutils [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] Acquiring lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.667906] env[62503]: DEBUG oslo_concurrency.lockutils [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] Acquired lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.668105] env[62503]: DEBUG nova.network.neutron [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Refreshing network info cache for port e524eb4d-de41-4da6-9bfa-395d033d0529 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 586.749158] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1591393-5660-45b9-8b81-13d8dd8462fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.756771] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0110ceaa-594d-4e9c-8456-7a63c163b39c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.789376] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f25991-342f-444d-ad88-b06489b2f522 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.796767] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831d6b40-4816-4005-9e6f-f2fbff84821a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.811462] env[62503]: DEBUG nova.compute.provider_tree [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.898991] env[62503]: ERROR nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. 
[ 586.898991] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 586.898991] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.898991] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.898991] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.898991] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.898991] env[62503]: ERROR nova.compute.manager raise self.value [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.898991] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 586.898991] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.898991] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 586.899457] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 586.899457] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 586.899457] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. 
[ 586.899457] env[62503]: ERROR nova.compute.manager [ 586.899457] env[62503]: Traceback (most recent call last): [ 586.899457] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 586.899457] env[62503]: listener.cb(fileno) [ 586.899457] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.899457] env[62503]: result = function(*args, **kwargs) [ 586.899457] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 586.899457] env[62503]: return func(*args, **kwargs) [ 586.899457] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 586.899457] env[62503]: raise e [ 586.899457] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 586.899457] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 586.899457] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.899457] env[62503]: created_port_ids = self._update_ports_for_instance( [ 586.899457] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.899457] env[62503]: with excutils.save_and_reraise_exception(): [ 586.899457] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.899457] env[62503]: self.force_reraise() [ 586.899457] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.899457] env[62503]: raise self.value [ 586.899457] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.899457] env[62503]: updated_port = self._update_port( [ 586.899457] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.899457] env[62503]: _ensure_no_port_binding_failure(port) [ 586.899457] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 586.899457] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 586.900234] env[62503]: nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. [ 586.900234] env[62503]: Removing descriptor: 21 [ 586.900234] env[62503]: ERROR nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. 
[ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Traceback (most recent call last): [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] yield resources [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.driver.spawn(context, instance, image_meta, [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.900234] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] vm_ref = self.build_virtual_machine(instance, [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] for vif in network_info: [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self._sync_wrapper(fn, *args, **kwargs) [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.wait() [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self[:] = self._gt.wait() [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self._exit_event.wait() [ 586.900625] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.900979] env[62503]: ERROR 
nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] result = hub.switch() [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self.greenlet.switch() [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] result = function(*args, **kwargs) [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return func(*args, **kwargs) [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise e [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] nwinfo = self.network_api.allocate_for_instance( [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.900979] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] created_port_ids = self._update_ports_for_instance( [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] with excutils.save_and_reraise_exception(): [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.force_reraise() [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise self.value [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] updated_port = self._update_port( [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.901342] 
env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] _ensure_no_port_binding_failure(port) [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 586.901342] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise exception.PortBindingFailed(port_id=port['id']) [ 586.901697] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. [ 586.901697] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] [ 586.901697] env[62503]: INFO nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Terminating instance [ 586.902768] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.947990] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ba03a9-189a-7267-5e10-f207d0798f7d, 'name': SearchDatastore_Task, 'duration_secs': 0.025152} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.948945] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2936025-0f3d-4be6-a04a-5c9bd7700651 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.954408] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 586.954408] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522199f1-104c-c59b-6f0a-6362c094aed5" [ 586.954408] env[62503]: _type = "Task" [ 586.954408] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.962576] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522199f1-104c-c59b-6f0a-6362c094aed5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.974245] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Releasing lock "refresh_cache-78599fa1-be64-4797-92a9-ebc3a40b59a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.974528] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 586.974689] env[62503]: DEBUG nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 586.974816] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.003432] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.078520] env[62503]: INFO nova.scheduler.client.report [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Deleted allocations for instance 24e6abd6-fb6f-49ba-b01b-3977ff205fef [ 587.203699] env[62503]: DEBUG nova.network.neutron [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.309256] env[62503]: DEBUG nova.network.neutron [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.316985] env[62503]: DEBUG nova.scheduler.client.report [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 587.471805] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522199f1-104c-c59b-6f0a-6362c094aed5, 'name': SearchDatastore_Task, 'duration_secs': 0.020272} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.472176] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.472449] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.472714] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce41a76a-ace0-4a8b-96bd-1e4a5596a99f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.482115] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 587.482115] env[62503]: value = "task-1387698" [ 587.482115] env[62503]: _type = "Task" [ 587.482115] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.495756] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.511629] env[62503]: DEBUG nova.network.neutron [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.591038] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8e02d996-bd10-462a-bd9c-912409f6543d tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "24e6abd6-fb6f-49ba-b01b-3977ff205fef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.229s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.815709] env[62503]: DEBUG oslo_concurrency.lockutils [req-c15cad8f-3ee3-4576-a8b0-d7b0fdddc797 req-6931b465-cecb-4aef-9794-042bff8cc29f service nova] Releasing lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.816217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquired lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.816433] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.819692] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.063s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.820986] env[62503]: ERROR nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. 
[ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Traceback (most recent call last): [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.driver.spawn(context, instance, image_meta, [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] vm_ref = self.build_virtual_machine(instance, [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.820986] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] for vif in network_info: [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self._sync_wrapper(fn, *args, **kwargs) [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.wait() [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self[:] = self._gt.wait() [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self._exit_event.wait() [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] result = hub.switch() [ 587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
587.821361] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return self.greenlet.switch() [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] result = function(*args, **kwargs) [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] return func(*args, **kwargs) [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise e [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] nwinfo = self.network_api.allocate_for_instance( [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] created_port_ids = self._update_ports_for_instance( [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] with excutils.save_and_reraise_exception(): [ 587.821713] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] self.force_reraise() [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise self.value [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] updated_port = self._update_port( [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] _ensure_no_port_binding_failure(port) [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] raise exception.PortBindingFailed(port_id=port['id']) [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] nova.exception.PortBindingFailed: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. [ 587.822066] env[62503]: ERROR nova.compute.manager [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] [ 587.822369] env[62503]: DEBUG nova.compute.utils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 587.822369] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.308s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.824503] env[62503]: INFO nova.compute.claims [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.827483] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Build of instance 9f83ec50-5143-45e1-849a-5c441d2702e2 was re-scheduled: Binding failed for port 5e215c2e-3299-4635-90c3-831728c1b765, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 587.828364] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 587.828438] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquiring lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.828590] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Acquired lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.828773] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.993934] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387698, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.014777] env[62503]: INFO nova.compute.manager [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] [instance: 78599fa1-be64-4797-92a9-ebc3a40b59a1] Took 1.04 seconds to deallocate network for instance. [ 588.096518] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 588.358566] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.367439] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.461807] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.494929] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551977} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.495237] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.495409] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.495670] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef12b2cb-7875-47d0-a1e3-6d8efa4fd803 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.502247] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 588.502247] env[62503]: value = "task-1387699" [ 588.502247] env[62503]: _type = "Task" [ 588.502247] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.513333] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387699, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.548872] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.626514] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.893644] env[62503]: DEBUG nova.compute.manager [req-dbbdd75e-7988-4aee-adc4-dea728abe2d8 req-df04ac0b-e0fd-4816-bad6-06f81a6d410c service nova] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Received event network-vif-deleted-e524eb4d-de41-4da6-9bfa-395d033d0529 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 588.965331] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Releasing lock "refresh_cache-9f83ec50-5143-45e1-849a-5c441d2702e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.965331] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 588.965331] env[62503]: DEBUG nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 588.966599] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.991473] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.017173] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387699, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079315} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.020432] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 589.021430] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f6e664-ffcd-4174-b430-5e9d0633d507 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.044389] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 589.047569] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04f75222-8a82-48a0-a126-a470349405bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.062761] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Releasing lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.063160] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 589.063544] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 589.063950] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6c6ccbb-7c2c-496f-ad13-b1a005f94359 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.068040] env[62503]: INFO nova.scheduler.client.report [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Deleted allocations for instance 78599fa1-be64-4797-92a9-ebc3a40b59a1 [ 589.073959] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 589.073959] env[62503]: value = "task-1387700" [ 589.073959] env[62503]: _type = "Task" [ 589.073959] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.085335] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04526a54-8088-45a8-87b2-49997a47a7dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.100423] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.113379] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59fd4a4a-20f5-4b8f-970a-acfc882f45a3 could not be found. [ 589.113648] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 589.114292] env[62503]: INFO nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 589.114292] env[62503]: DEBUG oslo.service.loopingcall [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.117045] env[62503]: DEBUG nova.compute.manager [-] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 589.118894] env[62503]: DEBUG nova.network.neutron [-] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 589.168567] env[62503]: DEBUG nova.network.neutron [-] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.291129] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0404bcec-1be3-4e75-a2dd-b4a551da8f2a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.301477] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a37e6c5-27f5-4f3a-8644-4aed525ff2ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.332039] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cbf003-3426-496d-bf0f-38c0b36bae91 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.339869] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b2b616-2c73-4545-b96c-85573617c114 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.353645] env[62503]: DEBUG nova.compute.provider_tree [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.496683] env[62503]: DEBUG nova.network.neutron [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.592117] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387700, 'name': ReconfigVM_Task, 'duration_secs': 0.292585} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.592880] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfigured VM instance instance-0000000b to attach disk [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 589.593106] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45c80c52-2397-4a41-9b3d-82b235684f09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.597374] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be334e62-09cf-48bb-879c-48dba1066c83 tempest-ServersAdminTestJSON-732764820 tempest-ServersAdminTestJSON-732764820-project-member] Lock "78599fa1-be64-4797-92a9-ebc3a40b59a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.010s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.604090] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 589.604090] env[62503]: value = "task-1387701" [ 589.604090] env[62503]: _type = "Task" [ 589.604090] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.617210] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387701, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.671092] env[62503]: DEBUG nova.network.neutron [-] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.859197] env[62503]: DEBUG nova.scheduler.client.report [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 589.998648] env[62503]: INFO nova.compute.manager [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] [instance: 9f83ec50-5143-45e1-849a-5c441d2702e2] Took 1.03 seconds to deallocate network for instance. 
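The scheduler report records above repeat the full inventory dict for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. As a rough illustration only, assuming the usual Placement capacity formula capacity = (total - reserved) * allocation_ratio (the formula itself is not shown in this log), the schedulable capacity implied by those logged numbers can be recomputed like this:

    # Illustrative sketch, not part of the log: the values below are copied from
    # the "Inventory has not changed for provider ..." records above; the formula
    # is an assumption about how Placement derives capacity from these fields.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for resource_class, inv in inventory.items():
        # Assumed: usable capacity scales the unreserved total by the allocation ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{resource_class}: schedulable capacity {capacity:g}")
    # Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400 for the values logged here.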
[ 590.104120] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 590.121165] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387701, 'name': Rename_Task, 'duration_secs': 0.13472} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.121165] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 590.121165] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a07cbb4-1bde-47dc-9417-284c93376186 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.128573] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 590.128573] env[62503]: value = "task-1387702" [ 590.128573] env[62503]: _type = "Task" [ 590.128573] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.139117] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.177284] env[62503]: INFO nova.compute.manager [-] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Took 1.06 seconds to deallocate network for instance. 
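Several records in this stretch poll vCenter tasks until they log "completed successfully" together with a duration_secs value (ReconfigVM_Task 0.292585s and Rename_Task 0.13472s above, PowerOnVM_Task 0.481123s and further PowerOffVM_Task and DeleteDatastoreFile_Task completions below). A small sketch for pulling those timings out of lines in this exact format; the regex is derived only from the lines visible here and may not cover other task-log variants:

    import re

    # Matches the "... 'name': <Task>, 'duration_secs': <secs>} completed successfully" records.
    TASK_DONE = re.compile(
        r"'name': (?P<name>\w+), 'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
    )

    def task_durations(lines):
        """Yield (task_name, duration_seconds) for each completed-task log line."""
        for line in lines:
            m = TASK_DONE.search(line)
            if m:
                yield m.group('name'), float(m.group('secs'))

    sample = [
        "Task: {'id': task-1387701, 'name': Rename_Task, 'duration_secs': 0.13472} completed successfully.",
        "Task: {'id': task-1387702, 'name': PowerOnVM_Task, 'duration_secs': 0.481123} completed successfully.",
    ]
    for name, secs in task_durations(sample):
        print(f"{name}: {secs:.3f}s")   # Rename_Task: 0.135s, PowerOnVM_Task: 0.481s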
[ 590.180423] env[62503]: DEBUG nova.compute.claims [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 590.180748] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.368307] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.546s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.368817] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 590.372329] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.104s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.377194] env[62503]: INFO nova.compute.claims [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.644817] env[62503]: DEBUG oslo_vmware.api [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387702, 'name': PowerOnVM_Task, 'duration_secs': 0.481123} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.647498] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 590.647498] env[62503]: DEBUG nova.compute.manager [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 590.647498] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.648142] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dc2bf9-848d-4325-b465-282c6ce5e4e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.888187] env[62503]: DEBUG nova.compute.utils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.894609] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 590.894609] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.985962] env[62503]: DEBUG nova.policy [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4fb27179ab0249199defdb4a35f5c193', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c0b6998f784f0c9641af8641203e32', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.054480] env[62503]: INFO nova.scheduler.client.report [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Deleted allocations for instance 9f83ec50-5143-45e1-849a-5c441d2702e2 [ 591.169558] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.400549] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 591.550540] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Successfully created port: 8ce7fb1c-b76d-48fe-be4f-bb2287adce87 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.570415] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eb0402ba-4447-4563-bad3-226e21db94d7 tempest-ServersAdminNegativeTestJSON-275877041 tempest-ServersAdminNegativeTestJSON-275877041-project-member] Lock "9f83ec50-5143-45e1-849a-5c441d2702e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.935s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.777881] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1e9d0b-e0ce-4f76-9df2-b69ddfcdd87f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.786226] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c45d849-f3ae-41a7-a9a2-a75aadcb8e85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.828589] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c7c9aa-c1fe-4371-b579-3556d4e68705 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.838921] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b4e1a4-ae65-4b90-8ad7-82af9dbc4e81 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.853478] env[62503]: DEBUG nova.compute.provider_tree [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.073103] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 592.358511] env[62503]: DEBUG nova.scheduler.client.report [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 592.418070] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 592.446360] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.446808] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.447236] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.447236] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.447501] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
592.448113] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.448113] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.448310] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.448495] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.448817] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.449175] env[62503]: DEBUG nova.virt.hardware [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.450508] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb74ba6-b2f3-4add-89c2-e66e1028d12e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.463162] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dfe0cf-1946-410d-9371-d2fd261c5c2d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.600533] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.863600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.863600] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 592.867580] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.669s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.869203] env[62503]: INFO nova.compute.claims [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.053374] env[62503]: INFO nova.compute.manager [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Rebuilding instance [ 593.115164] env[62503]: DEBUG nova.compute.manager [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 593.116386] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4045d5da-91e7-446e-b1cd-180cdae8f446 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.378737] env[62503]: DEBUG nova.compute.utils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.389059] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 593.389319] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.483742] env[62503]: DEBUG nova.policy [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c54729e113740fcb3197e2e1b109f6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4f2f443209c43d0b0199a6908b55d18', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.536225] env[62503]: ERROR nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. [ 593.536225] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 593.536225] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.536225] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.536225] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.536225] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.536225] env[62503]: ERROR nova.compute.manager raise self.value [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.536225] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 593.536225] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.536225] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 593.536750] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.536750] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 
593.536750] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. [ 593.536750] env[62503]: ERROR nova.compute.manager [ 593.536750] env[62503]: Traceback (most recent call last): [ 593.536750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 593.536750] env[62503]: listener.cb(fileno) [ 593.536750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.536750] env[62503]: result = function(*args, **kwargs) [ 593.536750] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.536750] env[62503]: return func(*args, **kwargs) [ 593.536750] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 593.536750] env[62503]: raise e [ 593.536750] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 593.536750] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 593.536750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.536750] env[62503]: created_port_ids = self._update_ports_for_instance( [ 593.536750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.536750] env[62503]: with excutils.save_and_reraise_exception(): [ 593.536750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.536750] env[62503]: self.force_reraise() [ 593.536750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.536750] env[62503]: raise self.value [ 593.536750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.536750] env[62503]: updated_port = self._update_port( [ 593.536750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.536750] env[62503]: _ensure_no_port_binding_failure(port) [ 593.536750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.536750] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 593.537623] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. 
[ 593.537623] env[62503]: Removing descriptor: 21 [ 593.537623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.537623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.537933] env[62503]: ERROR nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Traceback (most recent call last): [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] yield resources [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.driver.spawn(context, instance, image_meta, [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] vm_ref = self.build_virtual_machine(instance, [ 593.537933] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] for vif in network_info: [ 593.538281] env[62503]: 
ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self._sync_wrapper(fn, *args, **kwargs) [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.wait() [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self[:] = self._gt.wait() [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self._exit_event.wait() [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.538281] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] result = hub.switch() [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self.greenlet.switch() [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] result = function(*args, **kwargs) [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return func(*args, **kwargs) [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise e [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] nwinfo = self.network_api.allocate_for_instance( [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] created_port_ids = self._update_ports_for_instance( [ 593.538663] env[62503]: ERROR nova.compute.manager [instance: 
7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] with excutils.save_and_reraise_exception(): [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.force_reraise() [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise self.value [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] updated_port = self._update_port( [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] _ensure_no_port_binding_failure(port) [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise exception.PortBindingFailed(port_id=port['id']) [ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. 
[ 593.539023] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] [ 593.539394] env[62503]: INFO nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Terminating instance [ 593.540368] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquiring lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.540516] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquired lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.540725] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.592503] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.593925] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.673124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.673492] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.896217] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 
tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 593.972336] env[62503]: DEBUG nova.compute.manager [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Received event network-changed-8ce7fb1c-b76d-48fe-be4f-bb2287adce87 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 593.972336] env[62503]: DEBUG nova.compute.manager [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Refreshing instance network info cache due to event network-changed-8ce7fb1c-b76d-48fe-be4f-bb2287adce87. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 593.972336] env[62503]: DEBUG oslo_concurrency.lockutils [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] Acquiring lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.067510] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.069922] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Successfully created port: 20d6b598-9cc1-4dee-aa92-c60595007db3 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.143317] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 594.144087] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46db7623-e77f-45aa-9242-6728b22a72ed {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.155283] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 594.155283] env[62503]: value = "task-1387707" [ 594.155283] env[62503]: _type = "Task" [ 594.155283] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.174846] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387707, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.211279] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.444313] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99208cae-2d55-4f62-a54f-851ad20e3751 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.456213] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8327a198-265d-4e55-b68c-968c348469d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.492200] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f9e652-c9f6-41dc-bebb-d623556779e9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.499789] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc33b67-577a-451c-b44a-abad5c800ea1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.514700] env[62503]: DEBUG nova.compute.provider_tree [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.676918] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387707, 'name': PowerOffVM_Task, 'duration_secs': 0.2442} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.676918] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 594.677064] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.678129] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eaa9df-e4fe-4302-91b4-637bf4b25e0d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.690900] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 594.691196] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82a43468-9e7b-4bc4-8fe7-868473f01014 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.718165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Releasing lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.720104] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 594.720104] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.720104] env[62503]: DEBUG oslo_concurrency.lockutils [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] Acquired lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.720419] env[62503]: DEBUG nova.network.neutron [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Refreshing network info cache for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.722476] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ae1c3fa-d4e6-4ba6-addb-40096f0c4155 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.726393] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 594.726393] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 594.726745] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Deleting the datastore file [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 594.727861] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d33f97c2-09b3-481f-bc12-a1239b7f2cff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.741695] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41eca81f-2c1b-4b24-bb59-cac627b3e52d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.756602] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 594.756602] env[62503]: value = "task-1387709" [ 594.756602] env[62503]: _type = "Task" [ 594.756602] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.769594] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.774710] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a0b2744-2bb0-4eee-9861-418ba67b719c could not be found. [ 594.774857] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.775045] env[62503]: INFO nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 594.775556] env[62503]: DEBUG oslo.service.loopingcall [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.775959] env[62503]: DEBUG nova.compute.manager [-] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 594.776109] env[62503]: DEBUG nova.network.neutron [-] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 594.831132] env[62503]: DEBUG nova.network.neutron [-] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.910837] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 594.948450] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.948450] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.948450] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.949270] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.949270] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.949270] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.949474] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.949645] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.949852] env[62503]: DEBUG nova.virt.hardware [None 
req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.950051] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.950297] env[62503]: DEBUG nova.virt.hardware [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.951249] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73731786-2209-417a-9bdd-629a8e895145 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.960950] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1742228-a837-46cb-bb51-73196091fbd8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.020654] env[62503]: DEBUG nova.scheduler.client.report [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 595.246266] env[62503]: DEBUG nova.network.neutron [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.274174] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33335} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.274518] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 595.274751] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 595.274932] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 595.324968] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquiring lock "d4930731-7333-426c-a2fc-a732d351a0f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.325240] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "d4930731-7333-426c-a2fc-a732d351a0f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.334078] env[62503]: DEBUG nova.network.neutron [-] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.389379] env[62503]: DEBUG nova.network.neutron [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.525342] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.525866] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 595.529100] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.940s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.529352] env[62503]: DEBUG nova.objects.instance [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lazy-loading 'resources' on Instance uuid 90e42997-a34c-4a39-8d2f-7ab0ed19f028 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 595.837146] env[62503]: INFO nova.compute.manager [-] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Took 1.06 seconds to deallocate network for instance. [ 595.840625] env[62503]: DEBUG nova.compute.claims [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 595.840847] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.893532] env[62503]: DEBUG oslo_concurrency.lockutils [req-b70b308b-b81b-4627-8d30-d8ee47b01da5 req-cf7328bc-93a6-4320-b302-a62938fb834d service nova] Releasing lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.988286] env[62503]: DEBUG nova.compute.manager [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Received event network-changed-20d6b598-9cc1-4dee-aa92-c60595007db3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 595.988548] env[62503]: DEBUG nova.compute.manager [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Refreshing instance network info cache due to event network-changed-20d6b598-9cc1-4dee-aa92-c60595007db3. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 595.989106] env[62503]: DEBUG oslo_concurrency.lockutils [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] Acquiring lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.989410] env[62503]: DEBUG oslo_concurrency.lockutils [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] Acquired lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.989506] env[62503]: DEBUG nova.network.neutron [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Refreshing network info cache for port 20d6b598-9cc1-4dee-aa92-c60595007db3 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.033019] env[62503]: DEBUG nova.compute.utils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.039751] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 596.039751] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.050100] env[62503]: ERROR nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. 
[ 596.050100] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 596.050100] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.050100] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.050100] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.050100] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.050100] env[62503]: ERROR nova.compute.manager raise self.value [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.050100] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 596.050100] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.050100] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 596.050740] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.050740] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 596.050740] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. 
[ 596.050740] env[62503]: ERROR nova.compute.manager [ 596.050740] env[62503]: Traceback (most recent call last): [ 596.050740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 596.050740] env[62503]: listener.cb(fileno) [ 596.050740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.050740] env[62503]: result = function(*args, **kwargs) [ 596.050740] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.050740] env[62503]: return func(*args, **kwargs) [ 596.050740] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 596.050740] env[62503]: raise e [ 596.050740] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 596.050740] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 596.050740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.050740] env[62503]: created_port_ids = self._update_ports_for_instance( [ 596.050740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.050740] env[62503]: with excutils.save_and_reraise_exception(): [ 596.050740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.050740] env[62503]: self.force_reraise() [ 596.050740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.050740] env[62503]: raise self.value [ 596.050740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.050740] env[62503]: updated_port = self._update_port( [ 596.050740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.050740] env[62503]: _ensure_no_port_binding_failure(port) [ 596.050740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.050740] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 596.051539] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. [ 596.051539] env[62503]: Removing descriptor: 14 [ 596.051539] env[62503]: ERROR nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. 
[ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Traceback (most recent call last): [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] yield resources [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.driver.spawn(context, instance, image_meta, [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.051539] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] vm_ref = self.build_virtual_machine(instance, [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] for vif in network_info: [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self._sync_wrapper(fn, *args, **kwargs) [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.wait() [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self[:] = self._gt.wait() [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self._exit_event.wait() [ 596.052830] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.054078] env[62503]: ERROR 
nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] result = hub.switch() [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self.greenlet.switch() [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] result = function(*args, **kwargs) [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return func(*args, **kwargs) [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise e [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] nwinfo = self.network_api.allocate_for_instance( [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.054078] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] created_port_ids = self._update_ports_for_instance( [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] with excutils.save_and_reraise_exception(): [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.force_reraise() [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise self.value [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] updated_port = self._update_port( [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.054995] 
env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] _ensure_no_port_binding_failure(port) [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.054995] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise exception.PortBindingFailed(port_id=port['id']) [ 596.055717] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. [ 596.055717] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] [ 596.055717] env[62503]: INFO nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Terminating instance [ 596.055717] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquiring lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.144079] env[62503]: DEBUG nova.policy [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5b0eb7327ff4f71bef559195b936490', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c30b4b985d244b6b2c421d4fd234c50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.149491] env[62503]: DEBUG nova.compute.manager [req-79d01938-bc5f-44a1-aca8-d979ad5ea273 req-fd71f64e-9a5d-477f-8ad8-449a69bcfd57 service nova] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Received event network-vif-deleted-8ce7fb1c-b76d-48fe-be4f-bb2287adce87 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 596.324532] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow 
threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 596.324784] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 596.324942] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.325133] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 596.325275] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.325414] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 596.325712] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 596.325763] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 596.326516] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 596.326516] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 596.326516] env[62503]: DEBUG nova.virt.hardware [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 596.327568] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e8319e-f209-4d9b-bee0-473595be2109 
{{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.340257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fdaf71-ad09-491b-a020-a93759554993 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.363712] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.371856] env[62503]: DEBUG oslo.service.loopingcall [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 596.374592] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.375208] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78e74c61-81ab-4795-9bd4-218f86d5d00c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.395084] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.395084] env[62503]: value = "task-1387711" [ 596.395084] env[62503]: _type = "Task" [ 596.395084] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.403959] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387711, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.470246] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d574c8a4-8bf9-41d9-b110-e32f32fbc0bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.477755] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7e40bf-55c1-4810-92a9-f95b49820576 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.512620] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9c6d09-11a2-4824-9410-d40ba000d423 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.520375] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf85746-8c7f-4557-a75e-45f29bfac31b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.534463] env[62503]: DEBUG nova.compute.provider_tree [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.538626] env[62503]: DEBUG nova.network.neutron [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.538748] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 596.672151] env[62503]: DEBUG nova.network.neutron [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.770796] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Successfully created port: af8161d8-e134-4f1a-a976-807eaad5a1b0 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.907951] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387711, 'name': CreateVM_Task, 'duration_secs': 0.390067} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.908221] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.909039] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.909039] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.909892] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 596.910243] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96f3860d-f515-4246-877a-bf32bcce6c82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.916463] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 596.916463] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e03710-c5d9-8e1a-b37b-6063a5bafae5" [ 596.916463] env[62503]: _type = "Task" [ 596.916463] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.927016] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e03710-c5d9-8e1a-b37b-6063a5bafae5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.042489] env[62503]: DEBUG nova.scheduler.client.report [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 597.177677] env[62503]: DEBUG oslo_concurrency.lockutils [req-246ce6fe-e7f9-44d7-833f-5f9c7010c8e5 req-683434e7-295b-4287-8e80-2cfa2e4811d9 service nova] Releasing lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.177677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquired lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.177677] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.431878] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e03710-c5d9-8e1a-b37b-6063a5bafae5, 'name': SearchDatastore_Task, 'duration_secs': 0.009304} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.432389] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.435062] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.435062] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.435062] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.435062] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.435062] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be4e2522-29fa-4fbe-8c39-8f0238f46828 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.442943] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.443145] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.443884] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-416eac9e-ca25-49f9-91bb-f04ec6e393d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.451147] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 597.451147] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5222aaec-9c22-8394-02e8-ddc1b0323730" [ 597.451147] env[62503]: _type = "Task" [ 597.451147] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.460374] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5222aaec-9c22-8394-02e8-ddc1b0323730, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.552410] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.023s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.555034] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.062s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.556600] env[62503]: INFO nova.compute.claims [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.562654] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 597.605410] env[62503]: INFO nova.scheduler.client.report [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Deleted allocations for instance 90e42997-a34c-4a39-8d2f-7ab0ed19f028 [ 597.616305] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.616305] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.616305] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.616482] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.617372] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.617372] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 597.617599] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.617898] env[62503]: DEBUG 
nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.618011] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.621841] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.621841] env[62503]: DEBUG nova.virt.hardware [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.621841] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3088921b-7bbc-43ae-993f-71f9069fbee5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.634347] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4696cc49-8036-42da-b59e-e2d1fd9b40a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.710787] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.920398] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.967660] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5222aaec-9c22-8394-02e8-ddc1b0323730, 'name': SearchDatastore_Task, 'duration_secs': 0.010733} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.968512] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d203a38f-3055-47cd-8247-524552d22aea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.976327] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 597.976327] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52763291-0310-2179-9eb8-7dbd253f2f70" [ 597.976327] env[62503]: _type = "Task" [ 597.976327] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.986135] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52763291-0310-2179-9eb8-7dbd253f2f70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.135239] env[62503]: DEBUG nova.compute.manager [req-50d9ecab-22e6-4122-a043-27191ed1dae3 req-37e2dabf-79d6-4c28-8ff5-77c238b253de service nova] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Received event network-vif-deleted-20d6b598-9cc1-4dee-aa92-c60595007db3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 598.135666] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e7961620-13a6-43b0-8a39-27d7c19defe9 tempest-ServerDiagnosticsV248Test-736576457 tempest-ServerDiagnosticsV248Test-736576457-project-member] Lock "90e42997-a34c-4a39-8d2f-7ab0ed19f028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.545s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.424986] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Releasing lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.426596] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 598.427172] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 598.427296] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5951145f-ad5f-4b8d-9574-d90a655497d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.436766] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0619ec62-1b8f-455b-98bc-563b2afbbb77 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.468014] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8d4c087-9b0f-48c7-bd05-291a7e2a3e83 could not be found. [ 598.468014] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.468014] env[62503]: INFO nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Took 0.04 seconds to destroy the instance on the hypervisor. [ 598.468014] env[62503]: DEBUG oslo.service.loopingcall [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.468014] env[62503]: DEBUG nova.compute.manager [-] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 598.468014] env[62503]: DEBUG nova.network.neutron [-] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 598.474108] env[62503]: DEBUG nova.compute.manager [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Received event network-changed-af8161d8-e134-4f1a-a976-807eaad5a1b0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 598.474189] env[62503]: DEBUG nova.compute.manager [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Refreshing instance network info cache due to event network-changed-af8161d8-e134-4f1a-a976-807eaad5a1b0. 
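The warning above ("Instance does not exist on backend") shows the vmwareapi driver looking the VM up by UUID, finding nothing, and still reporting the instance destroyed so teardown can continue with network deallocation. Below is a generic, hypothetical sketch of that idempotent-destroy pattern; find_vm_by_uuid and delete_vm are placeholder callables, not Nova or oslo.vmware APIs.

```python
# Hypothetical sketch of an idempotent destroy: if the backing VM is already
# gone (the "Instance does not exist on backend" warning above), treat the
# destroy as successful and continue with network deallocation.
class InstanceNotFound(Exception):
    pass

def destroy_instance(session, instance_uuid, find_vm_by_uuid, delete_vm):
    try:
        vm_ref = find_vm_by_uuid(session, instance_uuid)  # e.g. a SearchIndex.FindAllByUuid lookup
        if vm_ref is None:
            raise InstanceNotFound(instance_uuid)
        delete_vm(session, vm_ref)
    except InstanceNotFound:
        # Nothing to tear down on the hypervisor; the destroy is a no-op here.
        pass
    # Either way the caller moves on to "Deallocating network for instance",
    # as the entries above show.
```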
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 598.474405] env[62503]: DEBUG oslo_concurrency.lockutils [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] Acquiring lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.474546] env[62503]: DEBUG oslo_concurrency.lockutils [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] Acquired lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.474710] env[62503]: DEBUG nova.network.neutron [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Refreshing network info cache for port af8161d8-e134-4f1a-a976-807eaad5a1b0 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.488226] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52763291-0310-2179-9eb8-7dbd253f2f70, 'name': SearchDatastore_Task, 'duration_secs': 0.008882} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.489212] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.489474] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.489998] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-299d6173-2297-4203-8f43-7823ffc51af7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.497973] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 598.497973] env[62503]: value = "task-1387713" [ 598.497973] env[62503]: _type = "Task" [ 598.497973] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.503569] env[62503]: DEBUG nova.network.neutron [-] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.509791] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.881459] env[62503]: ERROR nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. [ 598.881459] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 598.881459] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.881459] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.881459] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.881459] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.881459] env[62503]: ERROR nova.compute.manager raise self.value [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.881459] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 598.881459] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.881459] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 598.882046] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.882046] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 598.882046] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. 
[ 598.882046] env[62503]: ERROR nova.compute.manager [ 598.882046] env[62503]: Traceback (most recent call last): [ 598.882046] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 598.882046] env[62503]: listener.cb(fileno) [ 598.882046] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.882046] env[62503]: result = function(*args, **kwargs) [ 598.882046] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.882046] env[62503]: return func(*args, **kwargs) [ 598.882046] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 598.882046] env[62503]: raise e [ 598.882046] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 598.882046] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 598.882046] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.882046] env[62503]: created_port_ids = self._update_ports_for_instance( [ 598.882046] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.882046] env[62503]: with excutils.save_and_reraise_exception(): [ 598.882046] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.882046] env[62503]: self.force_reraise() [ 598.882046] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.882046] env[62503]: raise self.value [ 598.882046] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.882046] env[62503]: updated_port = self._update_port( [ 598.882046] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.882046] env[62503]: _ensure_no_port_binding_failure(port) [ 598.882046] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.882046] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 598.883027] env[62503]: nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. [ 598.883027] env[62503]: Removing descriptor: 21 [ 598.885039] env[62503]: ERROR nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. 
[ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Traceback (most recent call last): [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] yield resources [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.driver.spawn(context, instance, image_meta, [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] vm_ref = self.build_virtual_machine(instance, [ 598.885039] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] for vif in network_info: [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self._sync_wrapper(fn, *args, **kwargs) [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.wait() [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self[:] = self._gt.wait() [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self._exit_event.wait() [ 598.885498] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.885498] env[62503]: ERROR 
nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] result = hub.switch() [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self.greenlet.switch() [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] result = function(*args, **kwargs) [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return func(*args, **kwargs) [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise e [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] nwinfo = self.network_api.allocate_for_instance( [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] created_port_ids = self._update_ports_for_instance( [ 598.885929] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] with excutils.save_and_reraise_exception(): [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.force_reraise() [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise self.value [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] updated_port = self._update_port( [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.886358] 
env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] _ensure_no_port_binding_failure(port) [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise exception.PortBindingFailed(port_id=port['id']) [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. [ 598.886358] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] [ 598.886808] env[62503]: INFO nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Terminating instance [ 598.891654] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquiring lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.007926] env[62503]: DEBUG nova.network.neutron [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.008457] env[62503]: DEBUG nova.network.neutron [-] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.013035] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387713, 'name': CopyVirtualDisk_Task} progress is 77%. 
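The traceback above ends in _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising PortBindingFailed for port af8161d8-e134-4f1a-a976-807eaad5a1b0: Neutron accepted the port update but reported a failed binding, so the spawn is aborted and the instance is terminated. A minimal sketch of that kind of check follows; the binding:vif_type condition is an assumption inferred from the behaviour in the log, not a copy of the Nova source.

```python
# Minimal sketch of the check behind the PortBindingFailed traceback above:
# after updating a port, refuse to continue if Neutron reports that the
# binding failed (the exact field/constant used here is an assumption).
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# ensure_no_port_binding_failure({'id': 'af8161d8-...', 'binding:vif_type': 'binding_failed'})
# -> raises PortBindingFailed, which _allocate_network_async re-raises as seen above.
```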
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.064203] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f06d6-2098-477b-9cc8-d3056e4c9194 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.073749] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415308c6-2b46-4572-87bb-689f07b4b14d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.109982] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d8f31e-aabb-471d-b920-e917191fe319 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.118958] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2ee1ef-392f-4238-8bc8-fb58ad3a5571 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.134702] env[62503]: DEBUG nova.compute.provider_tree [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.267532] env[62503]: DEBUG nova.network.neutron [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.515964] env[62503]: INFO nova.compute.manager [-] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Took 1.05 seconds to deallocate network for instance. [ 599.516390] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597074} completed successfully. 
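The SearchDatastore_Task and CopyVirtualDisk_Task entries above follow the usual oslo.vmware pattern: invoke a *_Task vSphere method, then poll the returned task (the "_poll_task ... progress is N%" lines) until it completes. A hedged sketch of that pattern is below, assuming session is an oslo_vmware.api.VMwareAPISession; the argument handling for CopyVirtualDisk_Task is simplified compared with what the driver actually builds.

```python
# Illustrative invoke-then-poll sketch for the CopyVirtualDisk_Task entries
# above. "session" is assumed to be an oslo_vmware.api.VMwareAPISession;
# datacenter handling and the copy spec are simplified.
def copy_virtual_disk(session, source_path, dest_path, datacenter_ref=None):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_path,          # e.g. "[datastore1] devstack-image-cache_base/8150ca02-.../...vmdk"
        sourceDatacenter=datacenter_ref,
        destName=dest_path,              # e.g. "[datastore1] c4a88e75-.../c4a88e75-....vmdk"
        destDatacenter=datacenter_ref)
    # wait_for_task() polls the task (the "progress is N%" entries above)
    # and raises if the task finishes in an error state.
    return session.wait_for_task(task)
```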
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.523433] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 599.523433] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 599.523433] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bea90c2-fa19-4562-bd59-6d78fb842ff7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.527278] env[62503]: DEBUG nova.compute.claims [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.527591] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.533073] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 599.533073] env[62503]: value = "task-1387714" [ 599.533073] env[62503]: _type = "Task" [ 599.533073] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.542850] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387714, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.645022] env[62503]: DEBUG nova.scheduler.client.report [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 599.770792] env[62503]: DEBUG oslo_concurrency.lockutils [req-d4771309-6773-4994-b75c-55401f855ef4 req-5c062023-e885-477d-92d6-187a72d3c9d6 service nova] Releasing lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.771191] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquired lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.771375] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.044896] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072307} completed successfully. 
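The inventory reported above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 determines what Placement lets the scheduler consume. As a worked example with the logged numbers, using the standard Placement capacity arithmetic (illustrative only):

```python
# Effective capacity for the inventory logged above.
# Standard Placement arithmetic: capacity = (total - reserved) * allocation_ratio,
# while max_unit caps how much a single allocation may take.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: schedulable {capacity}, at most {inv['max_unit']} per allocation")
# VCPU: schedulable 192, at most 16 per allocation
# MEMORY_MB: schedulable 196078, at most 65530 per allocation
# DISK_GB: schedulable 400, at most 175 per allocation
```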
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.045747] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.046594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a523eeba-e4ed-466d-ad29-30aaf0ecd0a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.079471] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.079801] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-615d4045-faec-4236-af5e-4301437557ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.106248] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 600.106248] env[62503]: value = "task-1387715" [ 600.106248] env[62503]: _type = "Task" [ 600.106248] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.118053] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387715, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.149509] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.149509] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Start building networks asynchronously for instance. 
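"Start building networks asynchronously for instance" above refers to the pattern where network allocation runs in a background greenthread while block-device work proceeds, and the result is only waited on once VIF information is actually needed; that wait is exactly where the PortBindingFailed earlier in this log surfaced. A rough eventlet-based sketch, with illustrative names only:

```python
# Rough sketch of "building networks asynchronously": kick off Neutron port
# allocation in a greenthread, keep building, and wait on the result later.
# allocate_network and build_and_run are illustrative, not Nova functions.
import eventlet

def allocate_network(instance_uuid):
    # ... call Neutron to create and bind ports for the instance ...
    return []  # network_info model

def build_and_run(instance_uuid):
    nw_task = eventlet.spawn(allocate_network, instance_uuid)
    # ... build block device mappings, prepare the VM skeleton ...
    network_info = nw_task.wait()   # re-raises PortBindingFailed et al. here
    return network_info
```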
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 600.153223] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.054s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.154930] env[62503]: INFO nova.compute.claims [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.312082] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.444434] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.507631] env[62503]: DEBUG nova.compute.manager [req-3cee39e1-b4f9-46ac-8c63-ed92a2dcf2e0 req-14e4878b-0b4c-4edb-9b83-2ffba682f1be service nova] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Received event network-vif-deleted-af8161d8-e134-4f1a-a976-807eaad5a1b0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 600.623728] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387715, 'name': ReconfigVM_Task, 'duration_secs': 0.333421} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.625471] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Reconfigured VM instance instance-0000000b to attach disk [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.625977] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de9f660b-56f0-4af2-b98d-6706c8b9c418 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.638090] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 600.638090] env[62503]: value = "task-1387717" [ 600.638090] env[62503]: _type = "Task" [ 600.638090] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.654902] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387717, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.660151] env[62503]: DEBUG nova.compute.utils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.664536] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 600.664536] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 600.732429] env[62503]: DEBUG nova.policy [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef47975783cf46a98082bc8d2d87b2bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09bdea2bf71e432592262591e056db97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 600.948945] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Releasing lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.949581] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 600.949581] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.950233] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6f5ac36-7dc6-4aaa-932f-ca4c24d82c13 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.971069] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec51a50-3134-43d9-b305-c2a70431244f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.007257] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24b4c233-c874-452e-a7fc-492ca2a49a09 could not be found. [ 601.007623] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.007701] env[62503]: INFO nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Took 0.06 seconds to destroy the instance on the hypervisor. [ 601.007974] env[62503]: DEBUG oslo.service.loopingcall [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.012394] env[62503]: DEBUG nova.compute.manager [-] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 601.012552] env[62503]: DEBUG nova.network.neutron [-] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.039124] env[62503]: DEBUG nova.network.neutron [-] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.154434] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387717, 'name': Rename_Task, 'duration_secs': 0.192521} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.155139] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.155811] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-526623c7-e6f0-4e4a-bad3-cefaa5270faf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.165548] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Waiting for the task: (returnval){ [ 601.165548] env[62503]: value = "task-1387718" [ 601.165548] env[62503]: _type = "Task" [ 601.165548] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.173512] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 601.187116] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.544105] env[62503]: DEBUG nova.network.neutron [-] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.658275] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c6a9d1-76a0-4a52-8596-9452dde2e947 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.666362] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a6ce87-8e7b-499a-909b-bb393222e562 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.684635] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387718, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.724209] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Successfully created port: 3d2fc2a3-fced-4fde-98d7-29167b92782b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.726391] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af8f83a-e585-4afe-9a15-8d36fed079c3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.737526] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d8e198-e0b4-4ba7-8d44-c97eef1e0995 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.753460] env[62503]: DEBUG nova.compute.provider_tree [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.051820] env[62503]: INFO nova.compute.manager [-] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Took 1.04 seconds to deallocate network for instance. [ 602.057019] env[62503]: DEBUG nova.compute.claims [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 602.057675] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.184949] env[62503]: DEBUG oslo_vmware.api [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Task: {'id': task-1387718, 'name': PowerOnVM_Task, 'duration_secs': 0.762753} completed successfully. 
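Tasks task-1387713 through task-1387718 above trace the full disk-preparation path for instance c4a88e75-690f-4bed-a4f9-a0de3b193eff: copy the cached image VMDK, extend the root disk to the flavor size (1048576 KB for the 1 GB m1.nano root disk), reconfigure the VM to attach the disk, rename it, then power it on. Below is a hypothetical outline of that ordering with stand-in step functions, not the real helpers in nova/virt/vmwareapi/vm_util.py and vmops.py.

```python
# Hypothetical outline of the spawn sequence visible above. Each step maps
# to one vCenter task in the log; the step functions are stand-in stubs.
def _step(name):
    def run(*args):
        print(f"{name} with {args}")   # stand-in for invoking the vCenter task
    return run

copy_virtual_disk   = _step("CopyVirtualDisk_Task")    # task-1387713
extend_virtual_disk = _step("ExtendVirtualDisk_Task")  # task-1387714
attach_disk_to_vm   = _step("ReconfigVM_Task")         # task-1387715
rename_vm           = _step("Rename_Task")             # task-1387717
power_on_vm         = _step("PowerOnVM_Task")          # task-1387718

def spawn_from_cached_image(image_path, instance_path, root_gb):
    copy_virtual_disk(image_path, instance_path)
    extend_virtual_disk(instance_path, root_gb * 1024 * 1024)  # KB; 1048576 for the 1 GB root disk above
    attach_disk_to_vm(instance_path)
    rename_vm()
    power_on_vm()

spawn_from_cached_image(
    "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk",
    "[datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff/c4a88e75-690f-4bed-a4f9-a0de3b193eff.vmdk",
    1)
```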
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.185458] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.185655] env[62503]: DEBUG nova.compute.manager [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 602.186693] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3d75e4-1f76-4153-9c00-e981b72a613e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.223610] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 602.260779] env[62503]: DEBUG nova.scheduler.client.report [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 602.278324] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.278571] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Flavor limits 0:0:0 {{(pid=62503) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.278724] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.278906] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.279072] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.279221] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.279426] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.279584] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.279750] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.279919] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.280097] env[62503]: DEBUG nova.virt.hardware [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.281329] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19b44d1-171b-45c7-aca7-6c5ddb20ec71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.291311] env[62503]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4465bb2-d4f5-4d91-9da5-17dc1abef905 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.591705] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquiring lock "9bda2d4c-38c0-49ba-9a69-402869ff6a65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.593053] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "9bda2d4c-38c0-49ba-9a69-402869ff6a65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.717248] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.772016] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.772681] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Start building networks asynchronously for instance. 
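The "Acquiring lock" / "acquired ... waited" / "'released' ... held" triplets throughout this run come from oslo.concurrency's lockutils helpers wrapping critical sections such as the per-instance build lock and the resource tracker's "compute_resources" lock. A hedged illustration of that pattern follows; the lock names mirror the log, while the functions and print statements are placeholders.

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held, so only one greenthread can
    # update the resource tracker at a time; lockutils logs the wait/hold times.
    print('claiming resources for %s' % instance_uuid)

def build_instance(instance_uuid):
    # Same mechanism as a context manager, e.g. a per-instance build lock.
    with lockutils.lock(instance_uuid):
        print('building %s while holding its lock' % instance_uuid)
```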
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 602.775682] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.713s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.777331] env[62503]: INFO nova.compute.claims [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.012897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.012897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.012897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.012897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.013071] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.013641] env[62503]: INFO nova.compute.manager [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Terminating instance [ 603.015378] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 
tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "refresh_cache-c4a88e75-690f-4bed-a4f9-a0de3b193eff" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.015660] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquired lock "refresh_cache-c4a88e75-690f-4bed-a4f9-a0de3b193eff" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.015938] env[62503]: DEBUG nova.network.neutron [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.068154] env[62503]: DEBUG nova.compute.manager [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Received event network-changed-3d2fc2a3-fced-4fde-98d7-29167b92782b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 603.068154] env[62503]: DEBUG nova.compute.manager [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Refreshing instance network info cache due to event network-changed-3d2fc2a3-fced-4fde-98d7-29167b92782b. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 603.068154] env[62503]: DEBUG oslo_concurrency.lockutils [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] Acquiring lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.068154] env[62503]: DEBUG oslo_concurrency.lockutils [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] Acquired lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.068154] env[62503]: DEBUG nova.network.neutron [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Refreshing network info cache for port 3d2fc2a3-fced-4fde-98d7-29167b92782b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.284449] env[62503]: DEBUG nova.compute.utils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.290834] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 603.290917] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.359092] env[62503]: DEBUG nova.policy [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98885f8869f641c98564645dccb19b82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '192d05492b0541e08c24072289752ba9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.500034] env[62503]: ERROR nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. [ 603.500034] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 603.500034] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.500034] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.500034] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.500034] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.500034] env[62503]: ERROR nova.compute.manager raise self.value [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.500034] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 603.500034] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.500034] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 603.501657] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.501657] env[62503]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 603.501657] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. [ 603.501657] env[62503]: ERROR nova.compute.manager [ 603.501657] env[62503]: Traceback (most recent call last): [ 603.501657] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 603.501657] env[62503]: listener.cb(fileno) [ 603.501657] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.501657] env[62503]: result = function(*args, **kwargs) [ 603.501657] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.501657] env[62503]: return func(*args, **kwargs) [ 603.501657] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 603.501657] env[62503]: raise e [ 603.501657] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 603.501657] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 603.501657] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.501657] env[62503]: created_port_ids = self._update_ports_for_instance( [ 603.501657] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.501657] env[62503]: with excutils.save_and_reraise_exception(): [ 603.501657] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.501657] env[62503]: self.force_reraise() [ 603.501657] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.501657] env[62503]: raise self.value [ 603.501657] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.501657] env[62503]: updated_port = self._update_port( [ 603.501657] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.501657] env[62503]: _ensure_no_port_binding_failure(port) [ 603.501657] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.501657] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 603.502544] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. [ 603.502544] env[62503]: Removing descriptor: 21 [ 603.502544] env[62503]: ERROR nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. 
[ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Traceback (most recent call last): [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] yield resources [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.driver.spawn(context, instance, image_meta, [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.502544] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] vm_ref = self.build_virtual_machine(instance, [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] for vif in network_info: [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self._sync_wrapper(fn, *args, **kwargs) [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.wait() [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self[:] = self._gt.wait() [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self._exit_event.wait() [ 603.503035] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.503426] env[62503]: ERROR 
nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] result = hub.switch() [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self.greenlet.switch() [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] result = function(*args, **kwargs) [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return func(*args, **kwargs) [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise e [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] nwinfo = self.network_api.allocate_for_instance( [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.503426] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] created_port_ids = self._update_ports_for_instance( [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] with excutils.save_and_reraise_exception(): [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.force_reraise() [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise self.value [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] updated_port = self._update_port( [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.504681] 
env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] _ensure_no_port_binding_failure(port) [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.504681] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise exception.PortBindingFailed(port_id=port['id']) [ 603.505340] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. [ 603.505340] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] [ 603.505340] env[62503]: INFO nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Terminating instance [ 603.505340] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquiring lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.541697] env[62503]: DEBUG nova.network.neutron [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.619149] env[62503]: DEBUG nova.network.neutron [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.706366] env[62503]: DEBUG nova.network.neutron [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.765320] env[62503]: DEBUG nova.network.neutron [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.791736] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Start building block device mappings for instance. 
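The traceback above passes through excutils.save_and_reraise_exception() before the PortBindingFailed reaches _allocate_network_async. A minimal sketch of that oslo.utils idiom, with a stand-in exception class and placeholder cleanup:

```python
from oslo_utils import excutils

class PortBindingFailed(Exception):
    pass

def update_port(port_id):
    # Stand-in for the Neutron port update that fails in the log.
    raise PortBindingFailed('Binding failed for port %s' % port_id)

def update_ports_for_instance(port_id):
    try:
        return update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs inside the except block; when the context manager exits,
            # the original exception is re-raised with its traceback intact, which is
            # why the same PortBindingFailed shows up at every frame above.
            print('cleaning up after failed update of port %s' % port_id)

try:
    update_ports_for_instance('3d2fc2a3-fced-4fde-98d7-29167b92782b')
except PortBindingFailed as exc:
    print('re-raised as expected:', exc)
```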
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 603.872547] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Successfully created port: 132a000a-ea35-46aa-8853-38a0fba77bc1 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.196335] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6be6585-aadb-4159-b05b-3d53e440309e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.205547] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8921eb-bf02-4c17-88b9-40009148d44d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.209851] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Releasing lock "refresh_cache-c4a88e75-690f-4bed-a4f9-a0de3b193eff" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.211343] env[62503]: DEBUG nova.compute.manager [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 604.211343] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 604.211498] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7522cbdb-9b95-428d-a455-966d563fdeba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.222184] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 604.251201] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebee6fb4-2b37-4d97-993e-337262550c1f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.253398] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59188429-a1ee-493c-a080-453ba5d8b7aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.262090] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a8a656-4eb1-4e5c-96db-c191c67a263d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.266939] 
env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 604.266939] env[62503]: value = "task-1387720" [ 604.266939] env[62503]: _type = "Task" [ 604.266939] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.281644] env[62503]: DEBUG oslo_concurrency.lockutils [req-3f6e3c2c-4549-4159-a84f-41cb1d7147ba req-44a46264-e907-4d22-8e8c-d44565c3306e service nova] Releasing lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.282302] env[62503]: DEBUG nova.compute.provider_tree [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.285147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquired lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.285147] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.291768] env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.779384] env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387720, 'name': PowerOffVM_Task, 'duration_secs': 0.143001} completed successfully. 
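The task-1387720 lines show the usual oslo.vmware pattern: the SOAP call returns a Task managed object immediately and the session polls it until it completes. A rough sketch under the assumption of an oslo.vmware VMwareAPISession with placeholder credentials (vm_ref would come from an earlier property lookup; create_session=False keeps the sketch from trying to connect anywhere):

```python
from oslo_vmware import api

# Placeholder endpoint and credentials, for illustration only.
session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5,
    create_session=False)

def power_off(vm_ref):
    # PowerOffVM_Task returns a Task reference right away; wait_for_task() polls the
    # task state (the "progress is 0%" lines) and raises if it finishes in error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
```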
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.779384] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 604.779384] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 604.779384] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c41c299-a220-45cf-9b88-0f0510630a16 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.788143] env[62503]: DEBUG nova.scheduler.client.report [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 604.809876] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Start spawning the instance on the hypervisor. 
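For the inventory reported above, Placement treats (total - reserved) * allocation_ratio as the schedulable capacity of each resource class, which is why 48 physical VCPUs at a 4.0 allocation ratio behave as 192 schedulable ones. A quick check of the numbers from the log:

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```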
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 604.819270] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 604.819270] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 604.819270] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleting the datastore file [datastore1] c4a88e75-690f-4bed-a4f9-a0de3b193eff {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 604.819270] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35218baf-96d1-4754-8eab-0718ae7b479c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.826172] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.830828] env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for the task: (returnval){ [ 604.830828] env[62503]: value = "task-1387723" [ 604.830828] env[62503]: _type = "Task" [ 604.830828] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.841153] env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.853701] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.853995] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.854227] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.854403] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.854567] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.854700] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.854930] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.855073] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 604.855230] env[62503]: DEBUG 
nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.855415] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.855608] env[62503]: DEBUG nova.virt.hardware [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.856719] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708df42a-08b4-4358-a49d-65e4e4992a08 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.870645] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a471b2-7f10-4f4c-b6a7-75138d32152c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.900441] env[62503]: DEBUG nova.compute.manager [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Received event network-changed-132a000a-ea35-46aa-8853-38a0fba77bc1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 604.901019] env[62503]: DEBUG nova.compute.manager [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Refreshing instance network info cache due to event network-changed-132a000a-ea35-46aa-8853-38a0fba77bc1. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 604.901215] env[62503]: DEBUG oslo_concurrency.lockutils [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] Acquiring lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.902846] env[62503]: DEBUG oslo_concurrency.lockutils [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] Acquired lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.902846] env[62503]: DEBUG nova.network.neutron [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Refreshing network info cache for port 132a000a-ea35-46aa-8853-38a0fba77bc1 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 605.005644] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.047446] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquiring lock "ce8d9b01-e99d-4051-bd96-659692a436da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.047446] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "ce8d9b01-e99d-4051-bd96-659692a436da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.139439] env[62503]: DEBUG nova.compute.manager [req-29436987-019d-4d05-9123-6dbbf05e65e8 req-92c199db-acd5-42cc-a6fe-36c1e2ea91da service nova] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Received event network-vif-deleted-3d2fc2a3-fced-4fde-98d7-29167b92782b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 605.256354] env[62503]: ERROR nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. 
[ 605.256354] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 605.256354] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.256354] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.256354] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.256354] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.256354] env[62503]: ERROR nova.compute.manager raise self.value [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.256354] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 605.256354] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.256354] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 605.256852] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.256852] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 605.256852] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. 
[ 605.256852] env[62503]: ERROR nova.compute.manager [ 605.256852] env[62503]: Traceback (most recent call last): [ 605.256852] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 605.256852] env[62503]: listener.cb(fileno) [ 605.256852] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.256852] env[62503]: result = function(*args, **kwargs) [ 605.256852] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.256852] env[62503]: return func(*args, **kwargs) [ 605.256852] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 605.256852] env[62503]: raise e [ 605.256852] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 605.256852] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 605.256852] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.256852] env[62503]: created_port_ids = self._update_ports_for_instance( [ 605.256852] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.256852] env[62503]: with excutils.save_and_reraise_exception(): [ 605.256852] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.256852] env[62503]: self.force_reraise() [ 605.256852] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.256852] env[62503]: raise self.value [ 605.256852] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.256852] env[62503]: updated_port = self._update_port( [ 605.256852] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.256852] env[62503]: _ensure_no_port_binding_failure(port) [ 605.256852] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.256852] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 605.257663] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. [ 605.257663] env[62503]: Removing descriptor: 14 [ 605.257663] env[62503]: ERROR nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. 
[ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Traceback (most recent call last): [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] yield resources [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.driver.spawn(context, instance, image_meta, [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.257663] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] vm_ref = self.build_virtual_machine(instance, [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] for vif in network_info: [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self._sync_wrapper(fn, *args, **kwargs) [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.wait() [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self[:] = self._gt.wait() [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self._exit_event.wait() [ 605.258029] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.258394] env[62503]: ERROR 
nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] result = hub.switch() [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self.greenlet.switch() [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] result = function(*args, **kwargs) [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return func(*args, **kwargs) [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise e [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] nwinfo = self.network_api.allocate_for_instance( [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.258394] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] created_port_ids = self._update_ports_for_instance( [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] with excutils.save_and_reraise_exception(): [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.force_reraise() [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise self.value [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] updated_port = self._update_port( [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.258747] 
env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] _ensure_no_port_binding_failure(port) [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.258747] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise exception.PortBindingFailed(port_id=port['id']) [ 605.259094] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. [ 605.259094] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] [ 605.259094] env[62503]: INFO nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Terminating instance [ 605.259218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquiring lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.293156] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.293684] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 605.297028] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.671s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.298975] env[62503]: INFO nova.compute.claims [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.342831] env[62503]: DEBUG oslo_vmware.api [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Task: {'id': task-1387723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109154} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.343148] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.343343] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 605.343521] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.343694] env[62503]: INFO nova.compute.manager [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Took 1.13 seconds to destroy the instance on the hypervisor. [ 605.343940] env[62503]: DEBUG oslo.service.loopingcall [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.344171] env[62503]: DEBUG nova.compute.manager [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 605.344276] env[62503]: DEBUG nova.network.neutron [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.361573] env[62503]: DEBUG nova.network.neutron [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.420016] env[62503]: DEBUG nova.network.neutron [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.507458] env[62503]: DEBUG nova.network.neutron [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.508844] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Releasing lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.509266] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 605.509481] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.509756] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb8f0932-bf41-4988-b8e5-697d7a87233f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.525446] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67531062-20d7-4aa3-9273-95bf0e8ee401 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.555218] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20bf8c62-8b80-45c2-98d4-5a960f465aa0 could not be found. [ 605.557024] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.557024] env[62503]: INFO nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 605.557024] env[62503]: DEBUG oslo.service.loopingcall [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.557024] env[62503]: DEBUG nova.compute.manager [-] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 605.557024] env[62503]: DEBUG nova.network.neutron [-] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.584167] env[62503]: DEBUG nova.network.neutron [-] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.799277] env[62503]: DEBUG nova.compute.utils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.800475] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 605.800645] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.852695] env[62503]: DEBUG nova.policy [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b072e4c8ef94b26895d59ede518aaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0849093c8b48400a8e9d56171ea99e8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 605.864366] env[62503]: DEBUG nova.network.neutron [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.018565] env[62503]: DEBUG oslo_concurrency.lockutils [req-6dd9ac0a-4e9c-4123-96f4-ab302148188d req-f8f3c40c-a33c-410d-8f5c-66dfab37faa1 service nova] Releasing lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.019010] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquired lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.019206] 
env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.087500] env[62503]: DEBUG nova.network.neutron [-] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.135959] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Successfully created port: f4b3539e-f084-44c5-a8e5-9aa2e32329cd {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.308124] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 606.368773] env[62503]: INFO nova.compute.manager [-] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Took 1.02 seconds to deallocate network for instance. [ 606.542025] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.593546] env[62503]: INFO nova.compute.manager [-] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Took 1.03 seconds to deallocate network for instance. 
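The traceback at 605.257663 above is the visible edge of Nova's deferred network allocation: the port is created and bound in a separate greenthread (_allocate_network_async), and the resulting network_info is wrapped so that any failure only re-raises when the VMware driver first iterates it in get_vif_info. A minimal sketch of that wrapper pattern, using eventlet directly (class and function names here are illustrative, not Nova's actual API):

import eventlet

class AsyncNetworkInfo:
    """Defers network allocation to a greenthread; errors surface on first use."""

    def __init__(self, allocate_fn, *args, **kwargs):
        # Allocation starts immediately, but nobody waits on it yet.
        self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)

    def wait(self):
        # GreenThread.wait() returns the result, or re-raises whatever the
        # allocation raised (PortBindingFailed in the traceback above).
        return self._gt.wait()

    def __iter__(self):
        # Iterating the wrapper is the first real use, which is why the
        # failure shows up under get_vif_info rather than where the
        # allocation was kicked off.
        return iter(self.wait())


def allocate(requested_ports):
    if not requested_ports:
        raise RuntimeError('Binding failed')   # stand-in for PortBindingFailed
    return requested_ports


nw_info = AsyncNetworkInfo(allocate, [])       # nothing raised here
try:
    for vif in nw_info:                        # ...the error surfaces here
        print(vif)
except RuntimeError as exc:
    print('spawn failed:', exc)

This is why the instance reaches the spawn stage at all before being terminated: the binding failure is only observed once the driver needs the VIF details.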
[ 606.594271] env[62503]: DEBUG nova.compute.claims [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 606.594393] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.648243] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.797460] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f0eadb-4aae-46bf-a50e-9cbb3e2a1b1a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.806345] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4e27e3-3d94-42d0-b9e7-cd1091063174 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.840393] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824bcfca-dfeb-432e-b902-dd51bacdabc6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.849736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60176756-b1ff-4d07-b9b3-2e7f34666b83 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.863283] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 606.881137] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.986410] env[62503]: DEBUG nova.compute.manager [req-3022a495-e16f-4930-a85f-a3889b5c6527 req-01d257ff-4b71-4a66-bfa3-05c1a09fe2e5 service nova] 
[instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Received event network-vif-deleted-132a000a-ea35-46aa-8853-38a0fba77bc1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 607.154435] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Releasing lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.154675] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 607.154860] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.155702] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9e6c077-6953-4726-acc9-06b6aea08374 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.168282] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba45d7b-c6a1-465a-bc16-92317bfff4f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.196181] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bef7d4e7-9af2-4071-ae6d-bdbfa7f46460 could not be found. [ 607.196435] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.196620] env[62503]: INFO nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Took 0.04 seconds to destroy the instance on the hypervisor. [ 607.196904] env[62503]: DEBUG oslo.service.loopingcall [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.200849] env[62503]: DEBUG nova.compute.manager [-] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 607.200849] env[62503]: DEBUG nova.network.neutron [-] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.219865] env[62503]: DEBUG nova.network.neutron [-] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.346408] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 607.391168] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.391168] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.391168] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.391401] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.391401] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.391401] env[62503]: DEBUG nova.virt.hardware [None 
req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.391504] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.391594] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.391752] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.391905] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.394180] env[62503]: DEBUG nova.virt.hardware [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.395087] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d5d064-3498-4df9-99cc-ad3d0776b6b9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.404311] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff47314-0fc4-4c3c-871e-3a7e82b61372 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.408812] env[62503]: ERROR nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [req-6479b59d-d570-419e-affc-e28b83aba51e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6479b59d-d570-419e-affc-e28b83aba51e"}]} [ 607.426400] env[62503]: ERROR nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. [ 607.426400] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 607.426400] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.426400] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.426400] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.426400] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.426400] env[62503]: ERROR nova.compute.manager raise self.value [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.426400] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.426400] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.426400] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.427750] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.427750] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.427750] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. 
[ 607.427750] env[62503]: ERROR nova.compute.manager [ 607.427750] env[62503]: Traceback (most recent call last): [ 607.427750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.427750] env[62503]: listener.cb(fileno) [ 607.427750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.427750] env[62503]: result = function(*args, **kwargs) [ 607.427750] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.427750] env[62503]: return func(*args, **kwargs) [ 607.427750] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 607.427750] env[62503]: raise e [ 607.427750] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 607.427750] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 607.427750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.427750] env[62503]: created_port_ids = self._update_ports_for_instance( [ 607.427750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.427750] env[62503]: with excutils.save_and_reraise_exception(): [ 607.427750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.427750] env[62503]: self.force_reraise() [ 607.427750] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.427750] env[62503]: raise self.value [ 607.427750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.427750] env[62503]: updated_port = self._update_port( [ 607.427750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.427750] env[62503]: _ensure_no_port_binding_failure(port) [ 607.427750] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.427750] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.428629] env[62503]: nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. [ 607.428629] env[62503]: Removing descriptor: 14 [ 607.428629] env[62503]: ERROR nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. 
[ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Traceback (most recent call last): [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] yield resources [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.driver.spawn(context, instance, image_meta, [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.428629] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] vm_ref = self.build_virtual_machine(instance, [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] for vif in network_info: [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return self._sync_wrapper(fn, *args, **kwargs) [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.wait() [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self[:] = self._gt.wait() [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return self._exit_event.wait() [ 607.429015] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 607.429473] env[62503]: ERROR 
nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] current.throw(*self._exc) [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] result = function(*args, **kwargs) [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return func(*args, **kwargs) [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise e [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] nwinfo = self.network_api.allocate_for_instance( [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] created_port_ids = self._update_ports_for_instance( [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.429473] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] with excutils.save_and_reraise_exception(): [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.force_reraise() [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise self.value [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] updated_port = self._update_port( [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] _ensure_no_port_binding_failure(port) [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise exception.PortBindingFailed(port_id=port['id']) [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. [ 607.429838] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] [ 607.430179] env[62503]: INFO nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Terminating instance [ 607.430212] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 607.432264] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.432423] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.432579] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.449113] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 607.449113] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 607.460059] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 607.479113] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 607.722223] env[62503]: DEBUG nova.network.neutron [-] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.840517] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64192cc8-f9cd-4945-863d-292f65ca3243 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.848971] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37ba3f8-11f1-442e-8360-6d19159bd878 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.878485] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529453f4-2827-4ea7-9a74-0cf2b9af00db {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.886754] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d4e87d-5b63-453d-bf64-0a1244446f7b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.901565] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 607.958127] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] 
Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.070200] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.228534] env[62503]: INFO nova.compute.manager [-] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Took 1.03 seconds to deallocate network for instance. [ 608.231857] env[62503]: DEBUG nova.compute.claims [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 608.232099] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.432420] env[62503]: ERROR nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [req-8d8280cc-97e2-4aa8-99c7-e6a948ace1de] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8d8280cc-97e2-4aa8-99c7-e6a948ace1de"}]} [ 608.450838] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 608.474145] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 608.474374] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.486494] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 608.506394] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 608.534193] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "3178d5cd-1937-422b-9287-970d095aa452" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.534453] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "3178d5cd-1937-422b-9287-970d095aa452" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.573377] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.573824] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 608.574022] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 608.574357] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff1a7054-82b6-4e1b-bbc9-026f1501e428 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.586986] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7ccf4a-8cc0-408f-9280-355b751fe813 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.614177] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eed1dcc3-d9f9-4211-a4c3-850dcdad72b1 could not be found. [ 608.614177] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.614340] env[62503]: INFO nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Took 0.04 seconds to destroy the instance on the hypervisor. 
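As with 20bf8c62 and bef7d4e7 earlier, the destroy path tolerates an instance that no longer exists in vCenter: the backend lookup raises InstanceNotFound, vmops logs the warning and reports the instance destroyed, and cleanup proceeds to network deallocation. A condensed sketch of that control flow (helper names are illustrative, not the exact vmops internals):

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    pass

def destroy_instance(session, instance_uuid):
    """Best-effort destroy: a missing backend VM is not an error."""
    try:
        vm_ref = lookup_vm(session, instance_uuid)   # e.g. SearchIndex.FindAllByUuid
        power_off_and_delete(session, vm_ref)
    except InstanceNotFound as exc:
        # Matches the WARNING above: nothing to delete, carry on with cleanup.
        LOG.warning('Instance does not exist on backend: %s', exc)
    # Either way the caller continues with network deallocation and
    # releasing the resource claim.

def lookup_vm(session, uuid):
    raise InstanceNotFound('Instance %s could not be found.' % uuid)

def power_off_and_delete(session, vm_ref):
    pass

destroy_instance(None, 'eed1dcc3-d9f9-4211-a4c3-850dcdad72b1')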
[ 608.614589] env[62503]: DEBUG oslo.service.loopingcall [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.614851] env[62503]: DEBUG nova.compute.manager [-] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 608.614952] env[62503]: DEBUG nova.network.neutron [-] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.631086] env[62503]: DEBUG nova.network.neutron [-] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.842283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301a37b6-c26d-444e-82d8-eb8fe135f858 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.850781] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f87beba-9690-4940-bcf7-0c106a13ff4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.881301] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c58000-67b4-41e0-ba4b-df0115fd9c34 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.889431] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34a438e-009b-4bd4-b5ae-2c9e84e19bb6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.903103] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.079597] env[62503]: DEBUG nova.compute.manager [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Received event network-changed-f4b3539e-f084-44c5-a8e5-9aa2e32329cd {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 609.079597] env[62503]: DEBUG nova.compute.manager [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Refreshing instance network info cache due to event 
network-changed-f4b3539e-f084-44c5-a8e5-9aa2e32329cd. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 609.079597] env[62503]: DEBUG oslo_concurrency.lockutils [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] Acquiring lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.079597] env[62503]: DEBUG oslo_concurrency.lockutils [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] Acquired lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.079597] env[62503]: DEBUG nova.network.neutron [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Refreshing network info cache for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.134071] env[62503]: DEBUG nova.network.neutron [-] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.428565] env[62503]: ERROR nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [req-32f9d2da-b129-470d-83d4-7f984243f654] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-32f9d2da-b129-470d-83d4-7f984243f654"}]} [ 609.454266] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 609.473298] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 609.474382] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.488294] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 609.511711] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 609.601440] env[62503]: DEBUG nova.network.neutron [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.639227] env[62503]: INFO nova.compute.manager [-] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Took 1.02 seconds to deallocate network for instance. [ 609.643353] env[62503]: DEBUG nova.compute.claims [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 609.643353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.693584] env[62503]: DEBUG nova.network.neutron [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.714502] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquiring lock "97ac40d6-1c29-4282-86e5-be27a20cf5e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.714739] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "97ac40d6-1c29-4282-86e5-be27a20cf5e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.878493] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cf856a-15fe-46ee-b197-daf36f6e0a4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.888018] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f169612f-79d6-493d-95c6-2f3c95183a13 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.918511] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617055cb-8127-4427-9a27-3e6338676e66 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.926766] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12c7cd2-c111-4dbb-b982-07e048c12aa1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.941015] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 
tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.196547] env[62503]: DEBUG oslo_concurrency.lockutils [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] Releasing lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.196547] env[62503]: DEBUG nova.compute.manager [req-0c76c915-c804-4808-ad38-cd1aba45e662 req-5bf5b6e9-ed90-41a7-91ca-0ad869a396aa service nova] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Received event network-vif-deleted-f4b3539e-f084-44c5-a8e5-9aa2e32329cd {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 610.477258] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 45 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 610.477535] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 45 to 46 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 610.477763] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.983145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 
5.686s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.983630] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 610.986469] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.806s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.494797] env[62503]: DEBUG nova.compute.utils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.500897] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 611.500897] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.552472] env[62503]: DEBUG nova.policy [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd35813b2184c4987ba67bf36f9c5fdd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f18117518ef14097882f898af932b572', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.865280] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Successfully created port: f3ad1bdb-6ae8-40ed-8d53-afd440e4499c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.918126] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ee9490-0fd2-43a3-ab89-1499fac4f623 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.928818] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-02331fb6-5d8c-4ca7-8e1c-50c67832e3dd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.963689] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e51e79-bc49-499d-a760-2e79bb543bb3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.972494] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bc38ee-0596-4eda-bbc2-d1d8b4965efc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.987205] env[62503]: DEBUG nova.compute.provider_tree [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.003260] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 612.490295] env[62503]: DEBUG nova.scheduler.client.report [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 612.754109] env[62503]: DEBUG nova.compute.manager [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Received event network-changed-f3ad1bdb-6ae8-40ed-8d53-afd440e4499c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 612.754109] env[62503]: DEBUG nova.compute.manager [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Refreshing instance network info cache due to event network-changed-f3ad1bdb-6ae8-40ed-8d53-afd440e4499c. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 612.754109] env[62503]: DEBUG oslo_concurrency.lockutils [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] Acquiring lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.754109] env[62503]: DEBUG oslo_concurrency.lockutils [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] Acquired lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.754109] env[62503]: DEBUG nova.network.neutron [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Refreshing network info cache for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.912772] env[62503]: ERROR nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. [ 612.912772] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 612.912772] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.912772] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.912772] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.912772] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.912772] env[62503]: ERROR nova.compute.manager raise self.value [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.912772] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 612.912772] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.912772] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 612.913130] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.913130] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 612.913130] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. [ 612.913130] env[62503]: ERROR nova.compute.manager [ 612.913130] env[62503]: Traceback (most recent call last): [ 612.913130] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 612.913130] env[62503]: listener.cb(fileno) [ 612.913130] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.913130] env[62503]: result = function(*args, **kwargs) [ 612.913130] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.913130] env[62503]: return func(*args, **kwargs) [ 612.913130] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 612.913130] env[62503]: raise e [ 612.913130] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 612.913130] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 612.913130] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.913130] env[62503]: created_port_ids = self._update_ports_for_instance( [ 612.913130] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.913130] env[62503]: with excutils.save_and_reraise_exception(): [ 612.913130] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.913130] env[62503]: self.force_reraise() [ 612.913130] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.913130] env[62503]: raise self.value [ 612.913130] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.913130] env[62503]: updated_port = self._update_port( [ 612.913130] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.913130] env[62503]: _ensure_no_port_binding_failure(port) [ 612.913130] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.913130] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 612.913831] env[62503]: nova.exception.PortBindingFailed: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. [ 612.913831] env[62503]: Removing descriptor: 14 [ 612.997183] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.011s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.997849] env[62503]: ERROR nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. 
[ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Traceback (most recent call last): [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.driver.spawn(context, instance, image_meta, [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] vm_ref = self.build_virtual_machine(instance, [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.997849] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] for vif in network_info: [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self._sync_wrapper(fn, *args, **kwargs) [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.wait() [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self[:] = self._gt.wait() [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self._exit_event.wait() [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] result = hub.switch() [ 612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
612.998130] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return self.greenlet.switch() [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] result = function(*args, **kwargs) [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] return func(*args, **kwargs) [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise e [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] nwinfo = self.network_api.allocate_for_instance( [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] created_port_ids = self._update_ports_for_instance( [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] with excutils.save_and_reraise_exception(): [ 612.998389] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] self.force_reraise() [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise self.value [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] updated_port = self._update_port( [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] _ensure_no_port_binding_failure(port) [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] raise exception.PortBindingFailed(port_id=port['id']) [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] nova.exception.PortBindingFailed: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. [ 612.998655] env[62503]: ERROR nova.compute.manager [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] [ 612.998877] env[62503]: DEBUG nova.compute.utils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.999788] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.353s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.001295] env[62503]: INFO nova.compute.claims [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.003989] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Build of instance 59fd4a4a-20f5-4b8f-970a-acfc882f45a3 was re-scheduled: Binding failed for port e524eb4d-de41-4da6-9bfa-395d033d0529, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 613.004522] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 613.004743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.004902] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquired lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.005085] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.013268] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 613.040423] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.040681] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.040835] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.041024] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.041179] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.041323] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.041527] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.041682] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.041846] env[62503]: DEBUG 
nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.042013] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.042204] env[62503]: DEBUG nova.virt.hardware [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.043071] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa38bec-4606-4d68-9684-cc6dd73e5d08 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.052902] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604ebe05-e4f9-4ed6-8111-c25ab08a909d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.067591] env[62503]: ERROR nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. 
[ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Traceback (most recent call last): [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] yield resources [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.driver.spawn(context, instance, image_meta, [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] vm_ref = self.build_virtual_machine(instance, [ 613.067591] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] for vif in network_info: [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return self._sync_wrapper(fn, *args, **kwargs) [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.wait() [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self[:] = self._gt.wait() [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return self._exit_event.wait() [ 613.067918] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 613.067918] env[62503]: ERROR 
nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] current.throw(*self._exc) [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] result = function(*args, **kwargs) [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return func(*args, **kwargs) [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise e [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] nwinfo = self.network_api.allocate_for_instance( [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] created_port_ids = self._update_ports_for_instance( [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] with excutils.save_and_reraise_exception(): [ 613.068221] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.force_reraise() [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise self.value [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] updated_port = self._update_port( [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] _ensure_no_port_binding_failure(port) [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise exception.PortBindingFailed(port_id=port['id']) [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] nova.exception.PortBindingFailed: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. [ 613.068596] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] [ 613.068596] env[62503]: INFO nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Terminating instance [ 613.070219] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquiring lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.272399] env[62503]: DEBUG nova.network.neutron [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.368803] env[62503]: DEBUG nova.network.neutron [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.523409] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.620616] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.871323] env[62503]: DEBUG oslo_concurrency.lockutils [req-a16dc169-7fcd-430b-bdae-f54c6140e170 req-95eb8216-2b55-4277-810b-807631b64e0f service nova] Releasing lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.871713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquired lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.871900] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.123800] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Releasing lock "refresh_cache-59fd4a4a-20f5-4b8f-970a-acfc882f45a3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.123800] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 614.123800] env[62503]: DEBUG nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 614.123800] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.137792] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.341183] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b458bb7-e924-43a2-9248-02dbaac963bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.349665] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0c8e89-2f1a-4ab2-823f-027cd05cece7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.381901] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb03726-5d4d-4b8c-a351-d912356cbd4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.390165] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff400980-69a1-41ac-b3df-568bc3a4f402 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.404442] env[62503]: DEBUG nova.compute.provider_tree [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.406306] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.500719] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.641094] env[62503]: DEBUG nova.network.neutron [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.779799] env[62503]: DEBUG nova.compute.manager [req-5d7a8ab9-3537-4e83-bf67-97e7f7d0114c req-33ba2793-0a58-4708-9e31-c10cb171f11c service nova] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Received event network-vif-deleted-f3ad1bdb-6ae8-40ed-8d53-afd440e4499c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 614.909334] env[62503]: DEBUG nova.scheduler.client.report [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 615.003665] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Releasing lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.004099] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 615.004364] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.004706] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-831061c5-c002-42bc-8165-331313c28623 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.015165] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a2f72b-8b66-44aa-aa60-14947fecd7c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.038476] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a27278b-b930-4432-90f2-45cdf025c83e could not be found. [ 615.038691] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.038872] env[62503]: INFO nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 615.039126] env[62503]: DEBUG oslo.service.loopingcall [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.039346] env[62503]: DEBUG nova.compute.manager [-] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 615.039437] env[62503]: DEBUG nova.network.neutron [-] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.053847] env[62503]: DEBUG nova.network.neutron [-] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.143807] env[62503]: INFO nova.compute.manager [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 59fd4a4a-20f5-4b8f-970a-acfc882f45a3] Took 1.02 seconds to deallocate network for instance. 
[ 615.415838] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.416428] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 615.419453] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.252s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.419990] env[62503]: DEBUG nova.objects.instance [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 615.557876] env[62503]: DEBUG nova.network.neutron [-] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.926758] env[62503]: DEBUG nova.compute.utils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.928078] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 615.928254] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 615.967845] env[62503]: DEBUG nova.policy [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c41319365a5412b9bf7480a7edba4bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd69e3630f8144c288f8685c2201779ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.061602] env[62503]: INFO nova.compute.manager [-] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Took 1.02 seconds to deallocate network for instance. [ 616.064077] env[62503]: DEBUG nova.compute.claims [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.064317] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.180055] env[62503]: INFO nova.scheduler.client.report [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Deleted allocations for instance 59fd4a4a-20f5-4b8f-970a-acfc882f45a3 [ 616.274847] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Successfully created port: 4cefe5dc-a25b-4540-9171-c28eb6b58b3f {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.429299] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2ad81782-4e29-40f3-9961-e4f97d1aa36a tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.430490] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.830s 
{{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.431916] env[62503]: INFO nova.compute.claims [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.435181] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 616.691983] env[62503]: DEBUG oslo_concurrency.lockutils [None req-14a931ce-672d-410b-8e2f-8af21632175d tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "59fd4a4a-20f5-4b8f-970a-acfc882f45a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 68.608s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.194710] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 617.201594] env[62503]: DEBUG nova.compute.manager [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Received event network-changed-4cefe5dc-a25b-4540-9171-c28eb6b58b3f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 617.201594] env[62503]: DEBUG nova.compute.manager [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Refreshing instance network info cache due to event network-changed-4cefe5dc-a25b-4540-9171-c28eb6b58b3f. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 617.201594] env[62503]: DEBUG oslo_concurrency.lockutils [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] Acquiring lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.201594] env[62503]: DEBUG oslo_concurrency.lockutils [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] Acquired lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.201594] env[62503]: DEBUG nova.network.neutron [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Refreshing network info cache for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.449809] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 617.482516] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.482813] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.483028] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.483262] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.483448] env[62503]: DEBUG nova.virt.hardware 
[None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.483635] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.483881] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.484103] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.484326] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.484516] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.485187] env[62503]: DEBUG nova.virt.hardware [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.485967] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64a093a-aeec-45e7-8a29-4f5b60eb038a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.494768] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa06f7c-3a65-45d4-ae52-815fd2499055 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.510580] env[62503]: ERROR nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. 
[ 617.510580] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 617.510580] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.510580] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.510580] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.510580] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.510580] env[62503]: ERROR nova.compute.manager raise self.value [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 617.510580] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 617.510580] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.510580] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 617.511104] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.511104] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 617.511104] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. 
[ 617.511104] env[62503]: ERROR nova.compute.manager [ 617.511104] env[62503]: Traceback (most recent call last): [ 617.511104] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 617.511104] env[62503]: listener.cb(fileno) [ 617.511104] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.511104] env[62503]: result = function(*args, **kwargs) [ 617.511104] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 617.511104] env[62503]: return func(*args, **kwargs) [ 617.511104] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 617.511104] env[62503]: raise e [ 617.511104] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 617.511104] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 617.511104] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.511104] env[62503]: created_port_ids = self._update_ports_for_instance( [ 617.511104] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.511104] env[62503]: with excutils.save_and_reraise_exception(): [ 617.511104] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.511104] env[62503]: self.force_reraise() [ 617.511104] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.511104] env[62503]: raise self.value [ 617.511104] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 617.511104] env[62503]: updated_port = self._update_port( [ 617.511104] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.511104] env[62503]: _ensure_no_port_binding_failure(port) [ 617.511104] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.511104] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 617.512394] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. [ 617.512394] env[62503]: Removing descriptor: 14 [ 617.512394] env[62503]: ERROR nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. 
[ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Traceback (most recent call last): [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] yield resources [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.driver.spawn(context, instance, image_meta, [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.512394] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] vm_ref = self.build_virtual_machine(instance, [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] for vif in network_info: [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self._sync_wrapper(fn, *args, **kwargs) [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.wait() [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self[:] = self._gt.wait() [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self._exit_event.wait() [ 617.512871] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.513235] env[62503]: ERROR 
nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] result = hub.switch() [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self.greenlet.switch() [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] result = function(*args, **kwargs) [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return func(*args, **kwargs) [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise e [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] nwinfo = self.network_api.allocate_for_instance( [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.513235] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] created_port_ids = self._update_ports_for_instance( [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] with excutils.save_and_reraise_exception(): [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.force_reraise() [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise self.value [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] updated_port = self._update_port( [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.513629] 
env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] _ensure_no_port_binding_failure(port) [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.513629] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise exception.PortBindingFailed(port_id=port['id']) [ 617.513924] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. [ 617.513924] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] [ 617.513924] env[62503]: INFO nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Terminating instance [ 617.516580] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.719424] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.728223] env[62503]: DEBUG nova.network.neutron [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.831995] env[62503]: DEBUG nova.network.neutron [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.837142] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e60a88-cb61-48aa-9a88-57740258c9d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.845892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa829a10-0f8b-4669-a49c-912d971ec054 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.878463] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c033b9-f086-427a-abb6-4d98c2cf939f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.886409] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80eb0b0-739d-4183-957a-0a57578c6d4c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.901606] env[62503]: DEBUG nova.compute.provider_tree [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.166523] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "0b40f385-db0a-460c-b7fd-47e4d6afbaf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.166755] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "0b40f385-db0a-460c-b7fd-47e4d6afbaf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.334865] env[62503]: DEBUG oslo_concurrency.lockutils [req-d22ad178-1c14-48d9-8cb4-85daac2b8bcc req-21420147-9a6b-4ee3-85c8-6a9052587fbf service nova] Releasing lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.335296] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.335482] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.404814] env[62503]: DEBUG nova.scheduler.client.report [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 618.853897] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.908516] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.909018] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 618.911698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.071s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.947885] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.225882] env[62503]: DEBUG nova.compute.manager [req-66fd1fdf-3cdb-4e76-a8e2-c8049d848dae req-89a93b98-14bd-4eca-b988-29645cc77a40 service nova] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Received event network-vif-deleted-4cefe5dc-a25b-4540-9171-c28eb6b58b3f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 619.416810] env[62503]: DEBUG nova.compute.utils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.421513] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 619.421863] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.450747] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.451196] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 619.451328] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.451650] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f829a9a-d174-4451-bab5-c5d637357a8c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.461460] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330aacac-8863-4146-a134-2ef0426aba35 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.474767] env[62503]: DEBUG nova.policy [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '968c6e00caf74c4fa8e23b90f48531b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b396898c239d4bbaa16b1b204c4924fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 619.492921] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30befad4-aacb-44d5-87ed-4fc6b0e34bd6 could not be found. [ 619.493168] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 619.493353] env[62503]: INFO nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 619.493588] env[62503]: DEBUG oslo.service.loopingcall [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.493798] env[62503]: DEBUG nova.compute.manager [-] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 619.493889] env[62503]: DEBUG nova.network.neutron [-] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 619.509970] env[62503]: DEBUG nova.network.neutron [-] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.784975] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Successfully created port: f8cd1af9-b011-4670-86ad-4792342fda84 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.792568] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ba4a9a-bd17-4376-82c1-5be7343edcf2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.802826] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b2d336-4171-43af-bad0-00bc3852833c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.839339] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab5188-c8c5-4df9-aa91-60ca347af92d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.850027] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7ce438-34da-4015-b144-b078cd4ff4c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.865824] env[62503]: DEBUG nova.compute.provider_tree [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.921664] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 620.013867] env[62503]: DEBUG nova.network.neutron [-] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.368338] env[62503]: DEBUG nova.scheduler.client.report [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 620.516494] env[62503]: INFO nova.compute.manager [-] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Took 1.02 seconds to deallocate network for instance. [ 620.518797] env[62503]: DEBUG nova.compute.claims [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 620.518974] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.719612] env[62503]: ERROR nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. 
[ 620.719612] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 620.719612] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.719612] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.719612] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.719612] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.719612] env[62503]: ERROR nova.compute.manager raise self.value [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.719612] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 620.719612] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.719612] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 620.720212] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.720212] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 620.720212] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. 
[ 620.720212] env[62503]: ERROR nova.compute.manager [ 620.720212] env[62503]: Traceback (most recent call last): [ 620.720212] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 620.720212] env[62503]: listener.cb(fileno) [ 620.720212] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.720212] env[62503]: result = function(*args, **kwargs) [ 620.720212] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 620.720212] env[62503]: return func(*args, **kwargs) [ 620.720212] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 620.720212] env[62503]: raise e [ 620.720212] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 620.720212] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 620.720212] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.720212] env[62503]: created_port_ids = self._update_ports_for_instance( [ 620.720212] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.720212] env[62503]: with excutils.save_and_reraise_exception(): [ 620.720212] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.720212] env[62503]: self.force_reraise() [ 620.720212] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.720212] env[62503]: raise self.value [ 620.720212] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.720212] env[62503]: updated_port = self._update_port( [ 620.720212] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.720212] env[62503]: _ensure_no_port_binding_failure(port) [ 620.720212] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.720212] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 620.720940] env[62503]: nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. [ 620.720940] env[62503]: Removing descriptor: 14 [ 620.873787] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.874359] env[62503]: ERROR nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. 
[ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Traceback (most recent call last): [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.driver.spawn(context, instance, image_meta, [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] vm_ref = self.build_virtual_machine(instance, [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] vif_infos = vmwarevif.get_vif_info(self._session, [ 620.874359] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] for vif in network_info: [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self._sync_wrapper(fn, *args, **kwargs) [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.wait() [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self[:] = self._gt.wait() [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self._exit_event.wait() [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] result = hub.switch() [ 620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
620.874663] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return self.greenlet.switch() [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] result = function(*args, **kwargs) [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] return func(*args, **kwargs) [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise e [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] nwinfo = self.network_api.allocate_for_instance( [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] created_port_ids = self._update_ports_for_instance( [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] with excutils.save_and_reraise_exception(): [ 620.874954] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] self.force_reraise() [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise self.value [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] updated_port = self._update_port( [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] _ensure_no_port_binding_failure(port) [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] raise exception.PortBindingFailed(port_id=port['id']) [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] nova.exception.PortBindingFailed: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. [ 620.875252] env[62503]: ERROR nova.compute.manager [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] [ 620.875501] env[62503]: DEBUG nova.compute.utils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 620.876287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.349s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.879251] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Build of instance 7a0b2744-2bb0-4eee-9861-418ba67b719c was re-scheduled: Binding failed for port 8ce7fb1c-b76d-48fe-be4f-bb2287adce87, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 620.879667] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 620.879883] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquiring lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.880036] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Acquired lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.880198] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.930934] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 620.954795] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.955076] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.955239] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.955420] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.955564] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.955708] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.955908] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.956152] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 620.956432] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.956666] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.956844] env[62503]: DEBUG nova.virt.hardware [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.957851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ed0d2f-3662-4d73-9a99-348b3db12552 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.966551] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9129f8d5-ddb9-470f-98f7-e7ac82cf51bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.981217] env[62503]: ERROR nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. 
[ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Traceback (most recent call last): [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] yield resources [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.driver.spawn(context, instance, image_meta, [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] vm_ref = self.build_virtual_machine(instance, [ 620.981217] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] for vif in network_info: [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return self._sync_wrapper(fn, *args, **kwargs) [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.wait() [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self[:] = self._gt.wait() [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return self._exit_event.wait() [ 620.981462] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 620.981462] env[62503]: ERROR 
nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] current.throw(*self._exc) [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] result = function(*args, **kwargs) [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return func(*args, **kwargs) [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise e [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] nwinfo = self.network_api.allocate_for_instance( [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] created_port_ids = self._update_ports_for_instance( [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] with excutils.save_and_reraise_exception(): [ 620.981724] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.force_reraise() [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise self.value [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] updated_port = self._update_port( [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] _ensure_no_port_binding_failure(port) [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise exception.PortBindingFailed(port_id=port['id']) [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. [ 620.981999] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] [ 620.981999] env[62503]: INFO nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Terminating instance [ 620.983538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquiring lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.983709] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquired lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.983878] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.269809] env[62503]: DEBUG nova.compute.manager [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Received event network-changed-f8cd1af9-b011-4670-86ad-4792342fda84 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 621.270092] env[62503]: DEBUG nova.compute.manager [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Refreshing instance network info cache due to event network-changed-f8cd1af9-b011-4670-86ad-4792342fda84. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 621.270307] env[62503]: DEBUG oslo_concurrency.lockutils [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] Acquiring lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.399686] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.476972] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.501063] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.583125] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.731308] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e28fe1-1b5f-4741-aa22-15caca26d712 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.739147] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4b7d05-23f4-444c-9c42-a2bb22c1959c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.771172] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb07359-34ff-4bf6-8496-726234857023 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.778952] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ddcd39-23d7-44e4-bc67-bddfc79fb7b9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.792963] env[62503]: DEBUG nova.compute.provider_tree [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.980123] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Releasing lock "refresh_cache-7a0b2744-2bb0-4eee-9861-418ba67b719c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.980384] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 621.980568] env[62503]: DEBUG nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 621.980785] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 621.997612] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.085282] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Releasing lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.085717] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 622.085910] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.086239] env[62503]: DEBUG oslo_concurrency.lockutils [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] Acquired lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.086809] env[62503]: DEBUG nova.network.neutron [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Refreshing network info cache for port f8cd1af9-b011-4670-86ad-4792342fda84 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.087889] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7997a156-3742-4717-aa36-cfa6cd1dcf94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.096540] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e166af96-0a80-48a8-a89e-e5df3efcab49 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.121154] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8d8e232-6096-4da3-8f2c-65a5e5f713ae could not be found. [ 622.121389] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 622.121567] env[62503]: INFO nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Took 0.04 seconds to destroy the instance on the hypervisor. [ 622.121866] env[62503]: DEBUG oslo.service.loopingcall [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.122031] env[62503]: DEBUG nova.compute.manager [-] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 622.122113] env[62503]: DEBUG nova.network.neutron [-] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 622.149299] env[62503]: DEBUG nova.network.neutron [-] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.296575] env[62503]: DEBUG nova.scheduler.client.report [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 622.500692] env[62503]: DEBUG nova.network.neutron [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.606114] env[62503]: DEBUG nova.network.neutron [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.651655] env[62503]: DEBUG nova.network.neutron [-] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.684289] env[62503]: DEBUG nova.network.neutron [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.801696] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.925s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.802350] env[62503]: ERROR nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Traceback (most recent call last): [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.driver.spawn(context, instance, image_meta, [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] vm_ref = self.build_virtual_machine(instance, [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.802350] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] for vif in network_info: [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 622.802637] env[62503]: ERROR nova.compute.manager 
[instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self._sync_wrapper(fn, *args, **kwargs) [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.wait() [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self[:] = self._gt.wait() [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self._exit_event.wait() [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] result = hub.switch() [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.802637] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return self.greenlet.switch() [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] result = function(*args, **kwargs) [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] return func(*args, **kwargs) [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise e [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] nwinfo = self.network_api.allocate_for_instance( [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] created_port_ids = self._update_ports_for_instance( [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: 
d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] with excutils.save_and_reraise_exception(): [ 622.802963] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] self.force_reraise() [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise self.value [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] updated_port = self._update_port( [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] _ensure_no_port_binding_failure(port) [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] raise exception.PortBindingFailed(port_id=port['id']) [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] nova.exception.PortBindingFailed: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. [ 622.803307] env[62503]: ERROR nova.compute.manager [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] [ 622.803580] env[62503]: DEBUG nova.compute.utils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 622.804253] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.747s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.807216] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Build of instance d8d4c087-9b0f-48c7-bd05-291a7e2a3e83 was re-scheduled: Binding failed for port 20d6b598-9cc1-4dee-aa92-c60595007db3, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 622.807657] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 622.807879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquiring lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.808061] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Acquired lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.808236] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.003094] env[62503]: INFO nova.compute.manager [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] [instance: 7a0b2744-2bb0-4eee-9861-418ba67b719c] Took 1.02 seconds to deallocate network for instance. [ 623.154979] env[62503]: INFO nova.compute.manager [-] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Took 1.03 seconds to deallocate network for instance. 
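The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294): after Neutron returns from the port update, Nova inspects the port's binding:vif_type and aborts the build when Neutron reports the binding as failed, which then triggers the claim abort, network deallocation and re-schedule seen in the surrounding entries. Below is a minimal, self-contained sketch of that check; it is simplified from the Nova source, and the local PortBindingFailed class and sample port dict are illustrative stand-ins for nova.exception.PortBindingFailed and the real Neutron response.

    # Sketch of the check behind the tracebacks above (simplified).
    # In Nova proper, VIF_TYPE_BINDING_FAILED lives in nova/network/model.py
    # and PortBindingFailed in nova/exception.py.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the port dict Neutron returned from the update_port call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Illustrative port dict: a port whose binding failed on the target host.
    port = {'id': 'f8cd1af9-b011-4670-86ad-4792342fda84',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port f8cd1af9-..., please check neutron logs ...

In practice this usually means the Neutron ML2 mechanism driver could not bind the port on the target host (no live agent, missing bridge or physical-network mapping, etc.); inspecting the port (openstack port show f8cd1af9-b011-4670-86ad-4792342fda84) and the neutron-server log is the usual next step, as the error message itself suggests.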
[ 623.157464] env[62503]: DEBUG nova.compute.claims [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 623.157642] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.187852] env[62503]: DEBUG oslo_concurrency.lockutils [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] Releasing lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.188038] env[62503]: DEBUG nova.compute.manager [req-4bdb578d-9a3a-4da4-9941-fb6486bd054c req-f7b76484-c90d-4968-a0c0-e27378622599 service nova] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Received event network-vif-deleted-f8cd1af9-b011-4670-86ad-4792342fda84 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 623.327542] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.410712] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.617790] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48023a47-e200-4291-94a2-a67259b8246d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.625598] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a62b0eb-d25b-4676-be58-62249cae5bf9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.656448] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c232ba5-329f-4623-8c6a-580a67ca6823 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.664039] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d11729d-bf0a-4126-a737-3ff855450d4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.678019] env[62503]: DEBUG nova.compute.provider_tree [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Inventory has not changed 
in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.913632] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Releasing lock "refresh_cache-d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.913951] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 623.914182] env[62503]: DEBUG nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 623.914386] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.931947] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.034841] env[62503]: INFO nova.scheduler.client.report [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Deleted allocations for instance 7a0b2744-2bb0-4eee-9861-418ba67b719c [ 624.180321] env[62503]: DEBUG nova.scheduler.client.report [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 624.437848] env[62503]: DEBUG nova.network.neutron [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.543381] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c5889fb0-1c0e-4d03-b6a4-67f76941137f tempest-ServersTestFqdnHostnames-1580407543 tempest-ServersTestFqdnHostnames-1580407543-project-member] Lock "7a0b2744-2bb0-4eee-9861-418ba67b719c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.049s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.689056] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.885s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.689643] env[62503]: ERROR nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. 
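The inventory record above reports, per resource class, a total, a reserved amount, and an allocation ratio; the usable capacity placement derives from such a record is (total - reserved) × allocation_ratio. The following snippet is illustrative only (it is not part of the log and the helper name is made up); it just redoes that arithmetic with the values reported for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2:

    # Illustrative only: recompute usable capacity from the inventory data
    # reported above (total, reserved, allocation_ratio per resource class).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        # (total - reserved) * allocation_ratio for each resource class
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}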
[ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Traceback (most recent call last): [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.driver.spawn(context, instance, image_meta, [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] vm_ref = self.build_virtual_machine(instance, [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.689643] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] for vif in network_info: [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self._sync_wrapper(fn, *args, **kwargs) [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.wait() [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self[:] = self._gt.wait() [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self._exit_event.wait() [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] result = hub.switch() [ 624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
624.689916] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return self.greenlet.switch() [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] result = function(*args, **kwargs) [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] return func(*args, **kwargs) [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise e [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] nwinfo = self.network_api.allocate_for_instance( [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] created_port_ids = self._update_ports_for_instance( [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] with excutils.save_and_reraise_exception(): [ 624.690245] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] self.force_reraise() [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise self.value [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] updated_port = self._update_port( [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] _ensure_no_port_binding_failure(port) [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] raise exception.PortBindingFailed(port_id=port['id']) [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] nova.exception.PortBindingFailed: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. [ 624.690574] env[62503]: ERROR nova.compute.manager [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] [ 624.690860] env[62503]: DEBUG nova.compute.utils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.691651] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.975s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.691830] env[62503]: DEBUG nova.objects.instance [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 624.694398] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Build of instance 24b4c233-c874-452e-a7fc-492ca2a49a09 was re-scheduled: Binding failed for port af8161d8-e134-4f1a-a976-807eaad5a1b0, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 624.694810] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 624.695054] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquiring lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.695205] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Acquired lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.695360] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.941868] env[62503]: INFO nova.compute.manager [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] [instance: d8d4c087-9b0f-48c7-bd05-291a7e2a3e83] Took 1.03 seconds to deallocate network for instance. [ 625.046224] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 625.214601] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.311307] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.568366] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.701994] env[62503]: DEBUG oslo_concurrency.lockutils [None req-280dd265-183b-4c2b-ae27-d0e1a87186ff tempest-ServersAdmin275Test-1807589186 tempest-ServersAdmin275Test-1807589186-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.703109] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.109s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.813500] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Releasing lock "refresh_cache-24b4c233-c874-452e-a7fc-492ca2a49a09" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.813741] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 625.813924] env[62503]: DEBUG nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 625.814107] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.828930] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.984886] env[62503]: INFO nova.scheduler.client.report [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Deleted allocations for instance d8d4c087-9b0f-48c7-bd05-291a7e2a3e83 [ 626.331392] env[62503]: DEBUG nova.network.neutron [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.498973] env[62503]: DEBUG oslo_concurrency.lockutils [None req-cc9648da-f367-499f-9ae4-d540c12618f7 tempest-ServerActionsTestJSON-1148358597 tempest-ServerActionsTestJSON-1148358597-project-member] Lock "d8d4c087-9b0f-48c7-bd05-291a7e2a3e83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.445s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.563880] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd799471-ead9-4874-b300-7802644ba0d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.574413] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df4a12-c4ec-4efd-9092-644f9f43000d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.611165] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94bdad6-1d38-4300-801c-498dec2c2fff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.621296] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e90ad3-9004-4623-90de-d4a65b9b5198 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.636747] env[62503]: DEBUG nova.compute.provider_tree [None 
req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.835917] env[62503]: INFO nova.compute.manager [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] [instance: 24b4c233-c874-452e-a7fc-492ca2a49a09] Took 1.02 seconds to deallocate network for instance. [ 627.003753] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 627.145623] env[62503]: DEBUG nova.scheduler.client.report [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 627.533376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.653794] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.950s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.654021] env[62503]: ERROR nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. 
[ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Traceback (most recent call last): [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.driver.spawn(context, instance, image_meta, [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] vm_ref = self.build_virtual_machine(instance, [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] vif_infos = vmwarevif.get_vif_info(self._session, [ 627.654021] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] for vif in network_info: [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self._sync_wrapper(fn, *args, **kwargs) [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.wait() [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self[:] = self._gt.wait() [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self._exit_event.wait() [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] result = hub.switch() [ 627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
627.654361] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return self.greenlet.switch() [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] result = function(*args, **kwargs) [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] return func(*args, **kwargs) [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise e [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] nwinfo = self.network_api.allocate_for_instance( [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] created_port_ids = self._update_ports_for_instance( [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] with excutils.save_and_reraise_exception(): [ 627.654697] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] self.force_reraise() [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise self.value [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] updated_port = self._update_port( [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] _ensure_no_port_binding_failure(port) [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] raise exception.PortBindingFailed(port_id=port['id']) [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] nova.exception.PortBindingFailed: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. [ 627.655054] env[62503]: ERROR nova.compute.manager [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] [ 627.655297] env[62503]: DEBUG nova.compute.utils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 627.656588] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Build of instance 20bf8c62-8b80-45c2-98d4-5a960f465aa0 was re-scheduled: Binding failed for port 3d2fc2a3-fced-4fde-98d7-29167b92782b, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 627.657107] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 627.657381] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquiring lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.657570] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Acquired lock "refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.658201] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.661598] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.780s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.661901] env[62503]: DEBUG nova.objects.instance [None 
req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lazy-loading 'resources' on Instance uuid c4a88e75-690f-4bed-a4f9-a0de3b193eff {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 628.490155] env[62503]: INFO nova.scheduler.client.report [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Deleted allocations for instance 24b4c233-c874-452e-a7fc-492ca2a49a09 [ 628.498627] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.851281] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.920102] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b94fa93-5a96-44aa-a8ed-f15c5c0fe312 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.927270] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64d8d01-fe55-4785-897f-0f304b84464a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.962011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3efb01-f946-434e-a768-8b9e925b3b95 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.973236] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88451678-d6a3-4cd0-bdbf-67b795e17605 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.988286] env[62503]: DEBUG nova.compute.provider_tree [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.998988] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2db54e89-b7c8-428c-855a-ae3533893b41 tempest-VolumesAssistedSnapshotsTest-64252410 tempest-VolumesAssistedSnapshotsTest-64252410-project-member] Lock "24b4c233-c874-452e-a7fc-492ca2a49a09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.431s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.354922] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Releasing lock 
"refresh_cache-20bf8c62-8b80-45c2-98d4-5a960f465aa0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.355269] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 629.355535] env[62503]: DEBUG nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 629.355748] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 629.371761] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.491642] env[62503]: DEBUG nova.scheduler.client.report [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 629.501027] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 629.874586] env[62503]: DEBUG nova.network.neutron [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.997509] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.999830] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.768s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.020127] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.030095] env[62503]: INFO nova.scheduler.client.report [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Deleted allocations for instance c4a88e75-690f-4bed-a4f9-a0de3b193eff [ 630.377698] env[62503]: INFO nova.compute.manager [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] [instance: 20bf8c62-8b80-45c2-98d4-5a960f465aa0] Took 1.02 seconds to deallocate network for instance. 
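Every failed build in the records above ends the same way: _allocate_network_async reaches _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the tracebacks), which raises exception.PortBindingFailed(port_id=port['id']); the claim is then aborted and the instance is re-scheduled. The sketch below shows what such a guard looks like; it is not copied from Nova, and the check on 'binding:vif_type' is an assumption — only the raise itself and the exception message are visible in the log:

    # Sketch of the guard the tracebacks above terminate in (illustrative).
    # Assumption: Neutron reports a failed binding as
    # port['binding:vif_type'] == 'binding_failed'.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

When this fires, the abort/re-schedule sequence seen throughout these records follows: the compute_resources claim is released, allocations are deleted in placement, and the build is handed back to the scheduler.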
[ 630.540015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-011ff5ca-86ef-4873-993e-6255c4300036 tempest-ServersAdmin275Test-1763865026 tempest-ServersAdmin275Test-1763865026-project-member] Lock "c4a88e75-690f-4bed-a4f9-a0de3b193eff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.529s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.858249] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70dbc7f-b6e1-4b33-8932-3df57ccbead6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.865938] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb944cc-3ca8-4f8e-bf58-036c7337c61c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.901037] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4332ca-e617-4afd-9e03-663a7508bc61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.909684] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12647cde-be36-465c-8a50-1dc3f35c2cc8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.924393] env[62503]: DEBUG nova.compute.provider_tree [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.428780] env[62503]: DEBUG nova.scheduler.client.report [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 631.434308] env[62503]: INFO nova.scheduler.client.report [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Deleted allocations for instance 20bf8c62-8b80-45c2-98d4-5a960f465aa0 [ 631.938204] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.935s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.938204] env[62503]: ERROR nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 
tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Traceback (most recent call last): [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.driver.spawn(context, instance, image_meta, [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self._vmops.spawn(context, instance, image_meta, injected_files, [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 631.938204] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] vm_ref = self.build_virtual_machine(instance, [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] vif_infos = vmwarevif.get_vif_info(self._session, [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] for vif in network_info: [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self._sync_wrapper(fn, *args, **kwargs) [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.wait() [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self[:] = self._gt.wait() [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self._exit_event.wait() [ 631.938616] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 631.938979] env[62503]: ERROR 
nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] result = hub.switch() [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return self.greenlet.switch() [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] result = function(*args, **kwargs) [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] return func(*args, **kwargs) [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise e [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] nwinfo = self.network_api.allocate_for_instance( [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 631.938979] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] created_port_ids = self._update_ports_for_instance( [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] with excutils.save_and_reraise_exception(): [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] self.force_reraise() [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise self.value [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] updated_port = self._update_port( [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.939334] 
env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] _ensure_no_port_binding_failure(port) [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 631.939334] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] raise exception.PortBindingFailed(port_id=port['id']) [ 631.939625] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] nova.exception.PortBindingFailed: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. [ 631.939625] env[62503]: ERROR nova.compute.manager [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] [ 631.939625] env[62503]: DEBUG nova.compute.utils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 631.939625] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.296s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.943207] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Build of instance bef7d4e7-9af2-4071-ae6d-bdbfa7f46460 was re-scheduled: Binding failed for port 132a000a-ea35-46aa-8853-38a0fba77bc1, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 631.943790] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 631.944121] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquiring lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.945130] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Acquired lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.945130] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 631.945671] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6fd2735d-f55f-4cf9-b0ab-01612c901fa1 tempest-AttachInterfacesUnderV243Test-714583448 tempest-AttachInterfacesUnderV243Test-714583448-project-member] Lock "20bf8c62-8b80-45c2-98d4-5a960f465aa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.941s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.454141] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 632.470729] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.551792] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.773173] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b73254f-5898-418a-8119-91099cd76664 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.788750] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a914127d-d2d7-4cc4-9eb7-d4755f7e9c25 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.823989] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2563e7fd-61b2-4117-963b-aedda594b875 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.832700] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52304edc-7adb-4849-b542-8e1f294db1ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.849208] env[62503]: DEBUG nova.compute.provider_tree [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.980256] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.056578] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Releasing lock "refresh_cache-bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.056834] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 633.057025] env[62503]: DEBUG nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 633.057544] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.081084] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.353102] env[62503]: DEBUG nova.scheduler.client.report [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 633.583620] env[62503]: DEBUG nova.network.neutron [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.859124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.921s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.859805] env[62503]: ERROR nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. 
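The tracebacks above all bottom out in /opt/stack/nova/nova/network/neutron.py line 294, where _ensure_no_port_binding_failure(port) raises PortBindingFailed after Neutron returns the updated port. As a rough, hedged sketch only (the exact condition is not visible in this log; the 'binding:vif_type' == 'binding_failed' test below is an assumption about how a failed binding is marked, and the class here is a stand-in, not the real nova.exception type):

    # Illustrative sketch, not the actual Nova source.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise if the port dict returned by Neutron shows a failed binding."""
        # Assumption: Neutron flags a failed binding via binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '132a000a-ea35-46aa-8853-38a0fba77bc1',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # mirrors the message logged at 631.939625

The exception then propagates back through save_and_reraise_exception() and the greenthread wait, which is why the same PortBindingFailed text reappears at the top of each _build_and_run_instance traceback.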
[ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Traceback (most recent call last): [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.driver.spawn(context, instance, image_meta, [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] vm_ref = self.build_virtual_machine(instance, [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.859805] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] for vif in network_info: [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return self._sync_wrapper(fn, *args, **kwargs) [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.wait() [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self[:] = self._gt.wait() [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return self._exit_event.wait() [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] current.throw(*self._exc) [ 633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
633.860099] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] result = function(*args, **kwargs) [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] return func(*args, **kwargs) [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise e [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] nwinfo = self.network_api.allocate_for_instance( [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] created_port_ids = self._update_ports_for_instance( [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] with excutils.save_and_reraise_exception(): [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] self.force_reraise() [ 633.860378] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise self.value [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] updated_port = self._update_port( [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] _ensure_no_port_binding_failure(port) [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] raise exception.PortBindingFailed(port_id=port['id']) [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] nova.exception.PortBindingFailed: Binding failed for 
port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. [ 633.860652] env[62503]: ERROR nova.compute.manager [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] [ 633.860652] env[62503]: DEBUG nova.compute.utils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.867523] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.800s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.870316] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Build of instance eed1dcc3-d9f9-4211-a4c3-850dcdad72b1 was re-scheduled: Binding failed for port f4b3539e-f084-44c5-a8e5-9aa2e32329cd, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 633.870761] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 633.870990] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.871156] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.871316] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.090484] env[62503]: INFO nova.compute.manager [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] [instance: bef7d4e7-9af2-4071-ae6d-bdbfa7f46460] Took 1.03 seconds to deallocate network for instance. 
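The "Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2" entries dump the provider inventory as a dict keyed by resource class. A small sketch of the capacity those fields imply, using the usual Placement relation capacity = (total - reserved) * allocation_ratio and the numbers copied from the log above (max_unit additionally caps any single allocation at 16 VCPU, 65530 MB, 176 GB):

    # Numbers taken verbatim from the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity {capacity:g}")
    # VCPU: capacity 192
    # MEMORY_MB: capacity 196078
    # DISK_GB: capacity 400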
[ 634.400997] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.553306] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.776899] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4d4d20-2c51-4679-81ff-0bfcd5adff82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.785177] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3890a8a-d405-47de-8640-38a83eaa34b9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.822312] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43dc73f-46ed-4a94-8460-3ac48bf6210e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.831340] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20aaabce-a027-4935-8d47-5a9706cf52ed {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.845579] env[62503]: DEBUG nova.compute.provider_tree [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.059986] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.059986] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 635.059986] env[62503]: DEBUG nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 635.060162] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.088064] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.129982] env[62503]: INFO nova.scheduler.client.report [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Deleted allocations for instance bef7d4e7-9af2-4071-ae6d-bdbfa7f46460 [ 635.288412] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 635.288642] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 635.350319] env[62503]: DEBUG nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 635.591962] env[62503]: DEBUG nova.network.neutron [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.645839] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4bad5bb1-7fa9-4e2b-9ed4-ab328e1c526a tempest-ServerActionsTestOtherB-427346871 tempest-ServerActionsTestOtherB-427346871-project-member] Lock "bef7d4e7-9af2-4071-ae6d-bdbfa7f46460" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.758s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.796187] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 635.796433] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 635.796488] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 635.858191] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.858874] env[62503]: ERROR nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. 
[ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Traceback (most recent call last): [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.driver.spawn(context, instance, image_meta, [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] vm_ref = self.build_virtual_machine(instance, [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.858874] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] for vif in network_info: [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return self._sync_wrapper(fn, *args, **kwargs) [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.wait() [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self[:] = self._gt.wait() [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return self._exit_event.wait() [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] current.throw(*self._exc) [ 635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
635.859149] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] result = function(*args, **kwargs) [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] return func(*args, **kwargs) [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise e [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] nwinfo = self.network_api.allocate_for_instance( [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] created_port_ids = self._update_ports_for_instance( [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] with excutils.save_and_reraise_exception(): [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] self.force_reraise() [ 635.859485] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise self.value [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] updated_port = self._update_port( [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] _ensure_no_port_binding_failure(port) [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] raise exception.PortBindingFailed(port_id=port['id']) [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] nova.exception.PortBindingFailed: Binding failed for 
port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. [ 635.859771] env[62503]: ERROR nova.compute.manager [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] [ 635.859771] env[62503]: DEBUG nova.compute.utils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.863635] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Build of instance 1a27278b-b930-4432-90f2-45cdf025c83e was re-scheduled: Binding failed for port f3ad1bdb-6ae8-40ed-8d53-afd440e4499c, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 635.864071] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 635.864412] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquiring lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.864622] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Acquired lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.864795] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.867286] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.147s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.868716] env[62503]: INFO nova.compute.claims [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.096804] env[62503]: INFO nova.compute.manager [None req-68ed7e64-78f9-431d-ac11-4e539144d770 
tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: eed1dcc3-d9f9-4211-a4c3-850dcdad72b1] Took 1.03 seconds to deallocate network for instance. [ 636.150260] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 636.304099] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 636.304099] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 636.304099] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Didn't find any instances for network info cache update. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10310}} [ 636.304099] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304099] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304099] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304278] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304385] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304521] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.304678] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 636.304787] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.394412] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.514404] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.706342] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.816341] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.015101] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Releasing lock "refresh_cache-1a27278b-b930-4432-90f2-45cdf025c83e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.015411] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 637.015650] env[62503]: DEBUG nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 637.015854] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 637.036307] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.147147] env[62503]: INFO nova.scheduler.client.report [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocations for instance eed1dcc3-d9f9-4211-a4c3-850dcdad72b1 [ 637.294353] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db26cb84-f40c-48e2-8d4b-9450e5b60cec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.304310] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7e11ca-218f-4c2b-beda-3a6723b26bb8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.342585] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476ce3ca-cf2d-4503-b376-ac263d26e551 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.349292] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f4f957-5c6a-421d-878e-25c6b041e1df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.365899] env[62503]: DEBUG nova.compute.provider_tree [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.539942] env[62503]: DEBUG nova.network.neutron [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.663616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-68ed7e64-78f9-431d-ac11-4e539144d770 tempest-ServerDiskConfigTestJSON-2078736902 
tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "eed1dcc3-d9f9-4211-a4c3-850dcdad72b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.348s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.870086] env[62503]: DEBUG nova.scheduler.client.report [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 638.046646] env[62503]: INFO nova.compute.manager [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] [instance: 1a27278b-b930-4432-90f2-45cdf025c83e] Took 1.03 seconds to deallocate network for instance. [ 638.168288] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 638.378515] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.379164] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 638.382795] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.864s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.701275] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.890435] env[62503]: DEBUG nova.compute.utils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.898112] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 638.898325] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.969789] env[62503]: DEBUG nova.policy [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '651b5babbd204dba8928d697353a33b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87268f38d19e4ea485502b201120a947', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 639.096345] env[62503]: INFO nova.scheduler.client.report [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Deleted allocations for instance 1a27278b-b930-4432-90f2-45cdf025c83e [ 639.303768] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cf8284-5fe0-47fb-a206-1c7af0338ff4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.313553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c09940-ae1a-49c0-ac34-a83e27db8bf8 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.356021] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587b5271-9dd6-4341-841d-9f7c936c6e8d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.359720] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c1693c1f-6497-429c-a7f7-5bf5591684d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.360192] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c1693c1f-6497-429c-a7f7-5bf5591684d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.365456] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e28da5-645e-4ec4-a516-91027a7881d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.379890] env[62503]: DEBUG nova.compute.provider_tree [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.401016] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 639.612798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6127b9b5-a976-4771-896a-db01ec307bda tempest-ImagesOneServerTestJSON-1249934967 tempest-ImagesOneServerTestJSON-1249934967-project-member] Lock "1a27278b-b930-4432-90f2-45cdf025c83e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.611s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.639254] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Successfully created port: 95cc94d9-2d68-4482-b2e3-5457ec2518c9 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.884240] env[62503]: DEBUG nova.scheduler.client.report [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 640.117096] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 640.397774] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.401490] env[62503]: ERROR nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. 
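Every failed build in this section follows the same shape: PortBindingFailed, "was re-scheduled", VIF unplug skipped, network deallocated, allocations deleted, and the instance lock released after roughly 70 seconds. A small triage sketch for pulling the affected instance and port UUIDs out of a log like this one (the regex relies only on the message format visible above; the file name in the usage comment is hypothetical):

    import re
    from collections import defaultdict

    # Matches "[instance: <uuid>] ... Binding failed for port <uuid>" as
    # emitted by nova.compute.manager in this log.
    PATTERN = re.compile(
        r'\[instance: (?P<instance>[0-9a-f-]{36})\].*?'
        r'Binding failed for port (?P<port>[0-9a-f-]{36})')

    def failed_bindings(lines):
        """Map instance UUID -> set of port UUIDs whose binding failed."""
        result = defaultdict(set)
        for line in lines:
            for m in PATTERN.finditer(line):
                result[m.group('instance')].add(m.group('port'))
        return result

    # with open('nova-compute.log') as f:          # hypothetical file name
    #     for inst, ports in failed_bindings(f).items():
    #         print(inst, sorted(ports))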
[ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Traceback (most recent call last): [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.driver.spawn(context, instance, image_meta, [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] vm_ref = self.build_virtual_machine(instance, [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 640.401490] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] for vif in network_info: [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self._sync_wrapper(fn, *args, **kwargs) [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.wait() [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self[:] = self._gt.wait() [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self._exit_event.wait() [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] result = hub.switch() [ 640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
640.401846] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return self.greenlet.switch() [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] result = function(*args, **kwargs) [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] return func(*args, **kwargs) [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise e [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] nwinfo = self.network_api.allocate_for_instance( [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] created_port_ids = self._update_ports_for_instance( [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] with excutils.save_and_reraise_exception(): [ 640.402186] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] self.force_reraise() [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise self.value [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] updated_port = self._update_port( [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] _ensure_no_port_binding_failure(port) [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] raise exception.PortBindingFailed(port_id=port['id']) [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] nova.exception.PortBindingFailed: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. [ 640.402536] env[62503]: ERROR nova.compute.manager [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] [ 640.402809] env[62503]: DEBUG nova.compute.utils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 640.403162] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.245s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.407078] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Build of instance 30befad4-aacb-44d5-87ed-4fc6b0e34bd6 was re-scheduled: Binding failed for port 4cefe5dc-a25b-4540-9171-c28eb6b58b3f, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 640.407580] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 640.407815] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.407960] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.409316] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.411360] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 640.449104] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 640.449557] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 640.449840] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.450134] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 640.450389] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.450749] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 640.451493] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 640.451750] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
640.452035] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 640.452320] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 640.452685] env[62503]: DEBUG nova.virt.hardware [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 640.453652] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70813abb-da55-4c4e-88b5-6e4977ad65ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.464306] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d03f783-0272-4ec5-8360-5d0134f3bae7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.489519] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "be79632e-78ca-440a-92ef-d86a9f32693e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.489894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "be79632e-78ca-440a-92ef-d86a9f32693e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.652360] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.947174] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.983313] env[62503]: DEBUG nova.compute.manager [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Received event network-changed-95cc94d9-2d68-4482-b2e3-5457ec2518c9 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 640.983501] env[62503]: DEBUG nova.compute.manager [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Refreshing instance network info cache due to event network-changed-95cc94d9-2d68-4482-b2e3-5457ec2518c9. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 640.983708] env[62503]: DEBUG oslo_concurrency.lockutils [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] Acquiring lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.983846] env[62503]: DEBUG oslo_concurrency.lockutils [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] Acquired lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.984016] env[62503]: DEBUG nova.network.neutron [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Refreshing network info cache for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.044886] env[62503]: ERROR nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. 
[ 641.044886] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 641.044886] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.044886] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.044886] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.044886] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.044886] env[62503]: ERROR nova.compute.manager raise self.value [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.044886] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 641.044886] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.044886] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 641.045833] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.045833] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 641.045833] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. 
[ 641.045833] env[62503]: ERROR nova.compute.manager [ 641.045833] env[62503]: Traceback (most recent call last): [ 641.045833] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 641.045833] env[62503]: listener.cb(fileno) [ 641.045833] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.045833] env[62503]: result = function(*args, **kwargs) [ 641.045833] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 641.045833] env[62503]: return func(*args, **kwargs) [ 641.045833] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 641.045833] env[62503]: raise e [ 641.045833] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 641.045833] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 641.045833] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.045833] env[62503]: created_port_ids = self._update_ports_for_instance( [ 641.045833] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.045833] env[62503]: with excutils.save_and_reraise_exception(): [ 641.045833] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.045833] env[62503]: self.force_reraise() [ 641.045833] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.045833] env[62503]: raise self.value [ 641.045833] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.045833] env[62503]: updated_port = self._update_port( [ 641.045833] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.045833] env[62503]: _ensure_no_port_binding_failure(port) [ 641.045833] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.045833] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 641.046490] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. [ 641.046490] env[62503]: Removing descriptor: 14 [ 641.046490] env[62503]: ERROR nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. 
[ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Traceback (most recent call last): [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] yield resources [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.driver.spawn(context, instance, image_meta, [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.046490] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] vm_ref = self.build_virtual_machine(instance, [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] for vif in network_info: [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self._sync_wrapper(fn, *args, **kwargs) [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.wait() [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self[:] = self._gt.wait() [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self._exit_event.wait() [ 641.046760] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 641.047117] env[62503]: ERROR 
nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] result = hub.switch() [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self.greenlet.switch() [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] result = function(*args, **kwargs) [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return func(*args, **kwargs) [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise e [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] nwinfo = self.network_api.allocate_for_instance( [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.047117] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] created_port_ids = self._update_ports_for_instance( [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] with excutils.save_and_reraise_exception(): [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.force_reraise() [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise self.value [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] updated_port = self._update_port( [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.047558] 
env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] _ensure_no_port_binding_failure(port) [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.047558] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise exception.PortBindingFailed(port_id=port['id']) [ 641.048293] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. [ 641.048293] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] [ 641.048293] env[62503]: INFO nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Terminating instance [ 641.052503] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.189970] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "47d67edd-0860-49a6-ab7e-0511cffb82ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.193731] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "47d67edd-0860-49a6-ab7e-0511cffb82ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.196325] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.196536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.319579] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] 
[instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.360097] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1be53b7-103e-40fd-aa3c-cb9a9024d7fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.367895] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f806cbe-8969-4b26-baef-dc531db0eec4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.404591] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b8e3e7-9130-49fd-892c-618b529bb1a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.412516] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b7b616-e5ff-43b7-a76a-61f83b47b724 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.426370] env[62503]: DEBUG nova.compute.provider_tree [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.510246] env[62503]: DEBUG nova.network.neutron [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.613955] env[62503]: DEBUG nova.network.neutron [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.822543] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock "refresh_cache-30befad4-aacb-44d5-87ed-4fc6b0e34bd6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.822823] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 641.823013] env[62503]: DEBUG nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 641.823192] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 641.847092] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.930371] env[62503]: DEBUG nova.scheduler.client.report [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 642.084578] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "d4990c66-63d5-43b0-8187-2074c99ccde2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.084765] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.118098] env[62503]: DEBUG oslo_concurrency.lockutils [req-4d1c2731-ecbf-4250-bef4-751e81d064a2 req-8dfd212b-75a7-4eca-b4c2-f4e7a6da0d83 service nova] Releasing lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.118387] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquired lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.118582] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.349597] env[62503]: DEBUG nova.network.neutron [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.435995] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.033s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.436656] env[62503]: ERROR nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Traceback (most recent call last): [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.driver.spawn(context, instance, image_meta, [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] vm_ref = self.build_virtual_machine(instance, [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.436656] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] for vif in network_info: [ 
642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return self._sync_wrapper(fn, *args, **kwargs) [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.wait() [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self[:] = self._gt.wait() [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return self._exit_event.wait() [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] current.throw(*self._exc) [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.436928] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] result = function(*args, **kwargs) [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] return func(*args, **kwargs) [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise e [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] nwinfo = self.network_api.allocate_for_instance( [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] created_port_ids = self._update_ports_for_instance( [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] with excutils.save_and_reraise_exception(): [ 642.437315] env[62503]: ERROR 
nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] self.force_reraise() [ 642.437315] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise self.value [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] updated_port = self._update_port( [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] _ensure_no_port_binding_failure(port) [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] raise exception.PortBindingFailed(port_id=port['id']) [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] nova.exception.PortBindingFailed: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. [ 642.437603] env[62503]: ERROR nova.compute.manager [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] [ 642.437804] env[62503]: DEBUG nova.compute.utils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. 
{{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.438912] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.871s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.440449] env[62503]: INFO nova.compute.claims [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.443186] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Build of instance a8d8e232-6096-4da3-8f2c-65a5e5f713ae was re-scheduled: Binding failed for port f8cd1af9-b011-4670-86ad-4792342fda84, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 642.443618] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 642.443837] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquiring lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.444579] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Acquired lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.444579] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.640885] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.700026] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.852535] env[62503]: INFO nova.compute.manager [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 30befad4-aacb-44d5-87ed-4fc6b0e34bd6] Took 1.03 seconds to deallocate network for instance. [ 642.967386] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.013221] env[62503]: DEBUG nova.compute.manager [req-e3b496cd-3362-4eba-ab77-1f35b325358c req-19ee90a6-74a2-47be-af12-186bdf08cbff service nova] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Received event network-vif-deleted-95cc94d9-2d68-4482-b2e3-5457ec2518c9 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 643.123012] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.203825] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Releasing lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.203825] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 643.203965] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.204503] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39ad5af2-430d-459d-8b35-d7205913f856 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.213540] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cc358d-4de6-45cc-b817-1ea159e1b6cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.236175] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73142a3f-3be8-4956-90f8-6ca223d2d01f could not be found. [ 643.236861] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.236861] env[62503]: INFO nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 643.236861] env[62503]: DEBUG oslo.service.loopingcall [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.237032] env[62503]: DEBUG nova.compute.manager [-] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 643.237088] env[62503]: DEBUG nova.network.neutron [-] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.261471] env[62503]: DEBUG nova.network.neutron [-] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.627202] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Releasing lock "refresh_cache-a8d8e232-6096-4da3-8f2c-65a5e5f713ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.627516] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 643.627650] env[62503]: DEBUG nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 643.627819] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.651065] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.764022] env[62503]: DEBUG nova.network.neutron [-] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.832824] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b10e3d-fc56-4d69-b6fe-af3d589d48b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.841474] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e59fcac-ef95-408e-882b-080c155d71a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.884914] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18697db4-9928-4d5b-b689-3f3bd7d72644 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.893525] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46221fd6-7e74-4a2e-a728-0296fa3e305d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.908844] env[62503]: DEBUG nova.compute.provider_tree [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.910849] env[62503]: INFO nova.scheduler.client.report [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Deleted allocations for instance 30befad4-aacb-44d5-87ed-4fc6b0e34bd6 [ 644.156545] env[62503]: DEBUG nova.network.neutron [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.271253] env[62503]: INFO nova.compute.manager [-] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Took 1.03 seconds to deallocate network for instance. 
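The inventory payload logged for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 in the records below (VCPU, MEMORY_MB and DISK_GB, each with total, reserved and allocation_ratio) is what the resource tracker's claims are checked against. A minimal Python sketch, assuming the usual placement-style formula capacity = (total - reserved) * allocation_ratio and not taken from Nova or Placement code, of turning such a payload into schedulable capacity:

# Hedged sketch: derive usable capacity from an inventory payload like the
# one logged for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    # Assumed formula: (total - reserved) * allocation_ratio per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}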
[ 644.272197] env[62503]: DEBUG nova.compute.claims [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 644.273448] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.424019] env[62503]: DEBUG nova.scheduler.client.report [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 644.431024] env[62503]: DEBUG oslo_concurrency.lockutils [None req-733aadb7-44ff-4603-9cb7-3c04ddeea34f tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "30befad4-aacb-44d5-87ed-4fc6b0e34bd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.680s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.661654] env[62503]: INFO nova.compute.manager [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] [instance: a8d8e232-6096-4da3-8f2c-65a5e5f713ae] Took 1.03 seconds to deallocate network for instance. [ 644.928744] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.929494] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 644.932081] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.399s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.937412] env[62503]: INFO nova.compute.claims [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.941987] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 645.446902] env[62503]: DEBUG nova.compute.utils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.457022] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 645.457022] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.474536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.536707] env[62503]: DEBUG nova.policy [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4b65de4e0da4486be90b9fc876686cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dae573aabf564e9cbd949522ff782053', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.712559] env[62503]: INFO nova.scheduler.client.report [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Deleted allocations for instance a8d8e232-6096-4da3-8f2c-65a5e5f713ae [ 645.957417] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 646.209261] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquiring lock "48d9b18d-04b5-44e4-809e-383819d39418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.210081] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "48d9b18d-04b5-44e4-809e-383819d39418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.230597] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3c8eac85-f4ca-46c6-a47f-7220e201d8ac tempest-FloatingIPsAssociationTestJSON-190004196 tempest-FloatingIPsAssociationTestJSON-190004196-project-member] Lock "a8d8e232-6096-4da3-8f2c-65a5e5f713ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.844s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.371264] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1229aad7-587b-4732-84e1-cc317ac30644 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.380389] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a80140-2bac-473b-b360-cae3455cc2bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.412760] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb7e784-0427-401d-b9b9-39ae26a7d94a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.422729] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393f5801-b4f9-4928-b101-56cc5f135f10 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.434511] env[62503]: DEBUG nova.compute.provider_tree [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.517161] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Successfully created port: 2e596441-f659-4fc5-bd1e-0075ab21ae3b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.734585] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 
tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 646.938095] env[62503]: DEBUG nova.scheduler.client.report [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 646.967197] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 647.000073] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:28:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='131126408',id=30,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-206591170',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.000322] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.000484] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.000663] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.000799] env[62503]: DEBUG 
nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.000941] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.001498] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.001719] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.001889] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.001985] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.002174] env[62503]: DEBUG nova.virt.hardware [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.003285] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc387ef-517d-4be8-8f69-775d4e832291 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.012233] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4964620d-13fb-4e7c-96e5-39f5bb7bdd8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.271537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.443943] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.444496] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 647.451247] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.428s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.451247] env[62503]: INFO nova.compute.claims [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.660158] env[62503]: DEBUG nova.compute.manager [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Received event network-changed-2e596441-f659-4fc5-bd1e-0075ab21ae3b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 647.660367] env[62503]: DEBUG nova.compute.manager [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Refreshing instance network info cache due to event network-changed-2e596441-f659-4fc5-bd1e-0075ab21ae3b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 647.660592] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] Acquiring lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.660734] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] Acquired lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.660892] env[62503]: DEBUG nova.network.neutron [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Refreshing network info cache for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.863116] env[62503]: ERROR nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. [ 647.863116] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 647.863116] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.863116] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.863116] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.863116] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.863116] env[62503]: ERROR nova.compute.manager raise self.value [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.863116] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.863116] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.863116] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.863783] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.863783] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.863783] env[62503]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. [ 647.863783] env[62503]: ERROR nova.compute.manager [ 647.863783] env[62503]: Traceback (most recent call last): [ 647.863783] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.863783] env[62503]: listener.cb(fileno) [ 647.863783] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.863783] env[62503]: result = function(*args, **kwargs) [ 647.863783] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.863783] env[62503]: return func(*args, **kwargs) [ 647.863783] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 647.863783] env[62503]: raise e [ 647.863783] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 647.863783] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 647.863783] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.863783] env[62503]: created_port_ids = self._update_ports_for_instance( [ 647.863783] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.863783] env[62503]: with excutils.save_and_reraise_exception(): [ 647.863783] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.863783] env[62503]: self.force_reraise() [ 647.863783] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.863783] env[62503]: raise self.value [ 647.863783] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.863783] env[62503]: updated_port = self._update_port( [ 647.863783] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.863783] env[62503]: _ensure_no_port_binding_failure(port) [ 647.863783] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.863783] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.864891] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. [ 647.864891] env[62503]: Removing descriptor: 16 [ 647.864891] env[62503]: ERROR nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. 
[ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Traceback (most recent call last): [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] yield resources [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.driver.spawn(context, instance, image_meta, [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.864891] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] vm_ref = self.build_virtual_machine(instance, [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] for vif in network_info: [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self._sync_wrapper(fn, *args, **kwargs) [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.wait() [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self[:] = self._gt.wait() [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self._exit_event.wait() [ 647.865248] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.865561] env[62503]: ERROR 
nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] result = hub.switch() [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self.greenlet.switch() [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] result = function(*args, **kwargs) [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return func(*args, **kwargs) [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise e [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] nwinfo = self.network_api.allocate_for_instance( [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.865561] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] created_port_ids = self._update_ports_for_instance( [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] with excutils.save_and_reraise_exception(): [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.force_reraise() [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise self.value [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] updated_port = self._update_port( [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.865902] 
env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] _ensure_no_port_binding_failure(port) [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.865902] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise exception.PortBindingFailed(port_id=port['id']) [ 647.866256] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. [ 647.866256] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] [ 647.866256] env[62503]: INFO nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Terminating instance [ 647.869643] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquiring lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.954392] env[62503]: DEBUG nova.compute.utils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.958995] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 647.958995] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.052067] env[62503]: DEBUG nova.policy [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4a4fa5de8b24961bddc8c2756e4a1ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2c4dd257d3d4101b6ea78c1b4188498', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 648.203282] env[62503]: DEBUG nova.network.neutron [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.440189] env[62503]: DEBUG nova.network.neutron [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.463021] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 648.572276] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Successfully created port: 73b93393-7850-45ae-977f-e26fb12e6842 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.899651] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b00b41c-0d9d-49b3-8fd0-1e75d7c93184 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.908816] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a680b001-1f63-4953-aa93-e25b651f3b8d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.945717] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff5dbb12-f59e-4d82-abb3-fd2803fa5637 req-7834ddc1-d3f9-4ad2-891c-0653a392c76f service nova] Releasing lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.946445] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquired lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.946651] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.949234] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0864cfa0-4eb9-4fc0-9937-b44c61060379 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.959031] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d85122-b83a-4922-952e-52c9be7c2269 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.985977] env[62503]: DEBUG nova.compute.provider_tree [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d 
tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.999662] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "b9259ced-344a-42e5-835d-3713631a68c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.999662] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "b9259ced-344a-42e5-835d-3713631a68c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.469443] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.476651] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 649.489898] env[62503]: DEBUG nova.scheduler.client.report [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 649.518029] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.518161] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.518322] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.518691] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.519031] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.519274] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.519832] env[62503]: DEBUG 
nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.519832] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.519981] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.520796] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.520796] env[62503]: DEBUG nova.virt.hardware [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.521614] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5958415-0d5c-40e6-a1ad-69a1a0b3a716 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.532561] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2233970a-dc9a-4631-8803-5a0bb1479e49 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.664762] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.767026] env[62503]: DEBUG nova.compute.manager [req-2f3f1466-6222-477f-bd56-d468673a6700 req-0c87ac92-2891-44f8-9688-3db9477f6167 service nova] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Received event network-vif-deleted-2e596441-f659-4fc5-bd1e-0075ab21ae3b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 649.998289] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.998884] env[62503]: DEBUG nova.compute.manager [None 
req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 650.002623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.022s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.003919] env[62503]: INFO nova.compute.claims [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.076727] env[62503]: ERROR nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 650.076727] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 650.076727] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.076727] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.076727] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.076727] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.076727] env[62503]: ERROR nova.compute.manager raise self.value [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.076727] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 650.076727] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.076727] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 650.077355] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.077355] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 650.077355] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 650.077355] env[62503]: ERROR nova.compute.manager [ 650.077355] env[62503]: Traceback (most recent call last): [ 650.077355] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 650.077355] env[62503]: listener.cb(fileno) [ 650.077355] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.077355] env[62503]: result = function(*args, **kwargs) [ 650.077355] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 650.077355] env[62503]: return func(*args, **kwargs) [ 650.077355] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 650.077355] env[62503]: raise e [ 650.077355] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 650.077355] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 650.077355] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.077355] env[62503]: created_port_ids = self._update_ports_for_instance( [ 650.077355] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.077355] env[62503]: with excutils.save_and_reraise_exception(): [ 650.077355] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.077355] env[62503]: self.force_reraise() [ 650.077355] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.077355] env[62503]: raise self.value [ 650.077355] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.077355] env[62503]: updated_port = self._update_port( [ 650.077355] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.077355] env[62503]: _ensure_no_port_binding_failure(port) [ 650.077355] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.077355] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 650.078239] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 650.078239] env[62503]: Removing descriptor: 16 [ 650.078239] env[62503]: ERROR nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. 
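The traceback above bottoms out in _ensure_no_port_binding_failure() at nova/network/neutron.py:294, which raises PortBindingFailed for port 73b93393-7850-45ae-977f-e26fb12e6842. A minimal sketch of that check, reconstructed from the traceback; the 'binding:vif_type' comparison and the local exception class are assumptions based on Neutron's port-binding semantics, not taken from this log:

    # Sketch of the check seen at nova/network/neutron.py:294 in the traceback above.
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' to 'binding_failed'; the exact constant Nova compares
    # against is not visible in this log.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Local stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by the Neutron API for the updated port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
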
[ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Traceback (most recent call last): [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] yield resources [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.driver.spawn(context, instance, image_meta, [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.078239] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] vm_ref = self.build_virtual_machine(instance, [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] for vif in network_info: [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self._sync_wrapper(fn, *args, **kwargs) [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.wait() [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self[:] = self._gt.wait() [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self._exit_event.wait() [ 650.080619] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.081095] env[62503]: ERROR 
nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] result = hub.switch() [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self.greenlet.switch() [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] result = function(*args, **kwargs) [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return func(*args, **kwargs) [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise e [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] nwinfo = self.network_api.allocate_for_instance( [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.081095] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] created_port_ids = self._update_ports_for_instance( [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] with excutils.save_and_reraise_exception(): [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.force_reraise() [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise self.value [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] updated_port = self._update_port( [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.081511] 
env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] _ensure_no_port_binding_failure(port) [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.081511] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise exception.PortBindingFailed(port_id=port['id']) [ 650.081827] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 650.081827] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] [ 650.081827] env[62503]: INFO nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Terminating instance [ 650.086587] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.086763] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquired lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.086931] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.171025] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Releasing lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.171025] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 650.171025] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.171025] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8776c2d5-8cba-40b3-aa05-52ff4ecce587 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.179631] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe9ace-c0ef-4f03-b907-6df0d70aff61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.209197] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a1af72f-71c8-42de-aa71-f011d85210a5 could not be found. [ 650.209869] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.210243] env[62503]: INFO nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 650.211119] env[62503]: DEBUG oslo.service.loopingcall [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.211119] env[62503]: DEBUG nova.compute.manager [-] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 650.211119] env[62503]: DEBUG nova.network.neutron [-] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.229783] env[62503]: DEBUG nova.network.neutron [-] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.347845] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquiring lock "cf6fb485-9672-42b5-ac88-bbf5e0941393" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.348179] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "cf6fb485-9672-42b5-ac88-bbf5e0941393" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.508383] env[62503]: DEBUG nova.compute.utils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.512449] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 650.512729] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.553620] env[62503]: DEBUG nova.policy [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '691809da402d4a29b085cfe3b22306b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a833cd3315d0487cb3badd7b0d330a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 650.604442] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.691647] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.733126] env[62503]: DEBUG nova.network.neutron [-] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.016196] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 651.070989] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Successfully created port: 875038d9-e4a3-44de-82ca-eaa4a28ed28c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.197649] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Releasing lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.198073] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 651.198398] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.198607] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31db1701-94e3-4d06-a119-ac6189a80289 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.207701] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3b8ca0-0937-4604-9216-790809573167 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.235695] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f0fefd82-1670-4bbb-b250-da0c3b6ca3f6 could not be found. 
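The 'Acquiring lock', 'Lock ... acquired ... waited Ns' and 'Lock ... released ... held Ns' lines throughout this section are emitted by oslo.concurrency named locks (lockutils.py:402/407/421 for the decorator form, 310/313/331 for the context-manager form). A minimal sketch of both usages; the lock names match the log, but the function bodies are illustrative placeholders:

    from oslo_concurrency import lockutils


    # Decorator form: produces the 'Acquiring lock "X" by "Y"' / 'acquired ...
    # waited' / 'released ... held' DEBUG lines (lockutils.py:402/407/421), as
    # seen for the "compute_resources" lock in this section.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # placeholder for the guarded resource-tracker work


    # Context-manager form: produces the plain 'Acquiring/Acquired/Releasing
    # lock' lines (lockutils.py:310/313/331), as seen for the
    # "refresh_cache-<uuid>" locks above.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder for rebuilding the instance network info cache
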
[ 651.235695] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.235695] env[62503]: INFO nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 651.235695] env[62503]: DEBUG oslo.service.loopingcall [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.237545] env[62503]: INFO nova.compute.manager [-] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Took 1.03 seconds to deallocate network for instance. [ 651.237752] env[62503]: DEBUG nova.compute.manager [-] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 651.237847] env[62503]: DEBUG nova.network.neutron [-] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.244017] env[62503]: DEBUG nova.compute.claims [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 651.244017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.273112] env[62503]: DEBUG nova.network.neutron [-] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.438780] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58122d-3bd9-4cf4-abd7-1e86b3da30de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.446150] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda8cd8d-026d-4563-b6b6-69e18f75ff2a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.475501] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395d74e9-167f-47af-8ea7-7ea283db2a0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.482703] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daef018d-e850-4399-9407-a8611331f766 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.495970] env[62503]: DEBUG nova.compute.provider_tree [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.775714] env[62503]: DEBUG nova.network.neutron [-] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.789328] env[62503]: DEBUG nova.compute.manager [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Received event network-changed-73b93393-7850-45ae-977f-e26fb12e6842 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 651.789328] env[62503]: DEBUG nova.compute.manager [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Refreshing instance network info cache due to event network-changed-73b93393-7850-45ae-977f-e26fb12e6842. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 651.789328] env[62503]: DEBUG oslo_concurrency.lockutils [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] Acquiring lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.789328] env[62503]: DEBUG oslo_concurrency.lockutils [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] Acquired lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.789328] env[62503]: DEBUG nova.network.neutron [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Refreshing network info cache for port 73b93393-7850-45ae-977f-e26fb12e6842 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.998990] env[62503]: DEBUG nova.scheduler.client.report [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 652.029521] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 652.046249] env[62503]: ERROR nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. 
[ 652.046249] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 652.046249] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.046249] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.046249] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.046249] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.046249] env[62503]: ERROR nova.compute.manager raise self.value [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.046249] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.046249] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.046249] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.046963] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.046963] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.046963] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. 
[ 652.046963] env[62503]: ERROR nova.compute.manager [ 652.046963] env[62503]: Traceback (most recent call last): [ 652.046963] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.046963] env[62503]: listener.cb(fileno) [ 652.046963] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.046963] env[62503]: result = function(*args, **kwargs) [ 652.046963] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.046963] env[62503]: return func(*args, **kwargs) [ 652.046963] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 652.046963] env[62503]: raise e [ 652.046963] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 652.046963] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 652.046963] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.046963] env[62503]: created_port_ids = self._update_ports_for_instance( [ 652.046963] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.046963] env[62503]: with excutils.save_and_reraise_exception(): [ 652.046963] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.046963] env[62503]: self.force_reraise() [ 652.046963] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.046963] env[62503]: raise self.value [ 652.046963] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.046963] env[62503]: updated_port = self._update_port( [ 652.046963] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.046963] env[62503]: _ensure_no_port_binding_failure(port) [ 652.046963] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.046963] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.047679] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. 
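The 'Instance failed network setup after 1 attempt(s)' errors and the re-raise at compute/manager.py:2014 ('raise e' in the tracebacks) come from Nova's asynchronous network allocation. A minimal sketch of that retry-and-reraise pattern, assuming a single-attempt configuration as in this run; only allocate_for_instance and the line references are taken from the log, the surrounding loop and names are illustrative:

    import logging

    LOG = logging.getLogger(__name__)

    # Assumption: one attempt, matching "after 1 attempt(s)" in the log; Nova
    # derives the real count from its network_allocate_retries option.
    ATTEMPTS = 1


    def allocate_network_with_retries(network_api, context, instance):
        """Illustrative version of the loop behind _allocate_network_async."""
        for attempt in range(1, ATTEMPTS + 1):
            try:
                # The call seen at compute/manager.py:1992 in the tracebacks.
                return network_api.allocate_for_instance(context, instance)
            except Exception as exc:
                LOG.error("Instance failed network setup after %d attempt(s): %s",
                          attempt, exc)
                if attempt == ATTEMPTS:
                    # Final attempt: re-raise so the build fails, as at
                    # compute/manager.py:2014 in the tracebacks above.
                    raise
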
[ 652.047679] env[62503]: Removing descriptor: 16 [ 652.063867] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 652.064137] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 652.064297] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.064525] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 652.064725] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.068074] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 652.068074] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 652.068074] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 652.068074] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 652.068074] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 652.068289] env[62503]: DEBUG nova.virt.hardware [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 652.068289] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f715088-ed0a-4c5e-9309-cae1219eda0d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.074994] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea159275-db56-4482-8b72-c43d62a01d21 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.092085] env[62503]: ERROR nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Traceback (most recent call last): [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] yield resources [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.driver.spawn(context, instance, image_meta, [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] vm_ref = self.build_virtual_machine(instance, [ 652.092085] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File 
"/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] for vif in network_info: [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return self._sync_wrapper(fn, *args, **kwargs) [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.wait() [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self[:] = self._gt.wait() [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return self._exit_event.wait() [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 652.092673] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] current.throw(*self._exc) [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] result = function(*args, **kwargs) [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return func(*args, **kwargs) [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise e [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] nwinfo = self.network_api.allocate_for_instance( [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] created_port_ids = self._update_ports_for_instance( [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in 
_update_ports_for_instance [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] with excutils.save_and_reraise_exception(): [ 652.093211] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.force_reraise() [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise self.value [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] updated_port = self._update_port( [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] _ensure_no_port_binding_failure(port) [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise exception.PortBindingFailed(port_id=port['id']) [ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. 
[ 652.093798] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] [ 652.093798] env[62503]: INFO nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Terminating instance [ 652.095835] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.095995] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.096172] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.278708] env[62503]: INFO nova.compute.manager [-] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Took 1.04 seconds to deallocate network for instance. [ 652.286521] env[62503]: DEBUG nova.compute.claims [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 652.286999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.316087] env[62503]: DEBUG nova.network.neutron [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.381614] env[62503]: DEBUG nova.network.neutron [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.504950] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.505480] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 652.508125] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.802s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.511830] env[62503]: INFO nova.compute.claims [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.615333] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.706774] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.884509] env[62503]: DEBUG oslo_concurrency.lockutils [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] Releasing lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.885018] env[62503]: DEBUG nova.compute.manager [req-114718b6-7118-4323-9306-5cf02f893d4b req-8848f8c5-c460-4396-9252-af04ac500ea5 service nova] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Received event network-vif-deleted-73b93393-7850-45ae-977f-e26fb12e6842 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 653.015712] env[62503]: DEBUG nova.compute.utils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.017774] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 653.018520] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.080908] env[62503]: DEBUG nova.policy [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4a4fa5de8b24961bddc8c2756e4a1ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2c4dd257d3d4101b6ea78c1b4188498', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 653.210649] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.210649] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Start destroying 
the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 653.210774] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.211247] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ae7f248-5095-456d-822f-3b6c74209b72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.220808] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16516137-0300-45c2-8b43-b754fc36b390 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.246246] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4 could not be found. [ 653.246246] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 653.246246] env[62503]: INFO nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 653.246246] env[62503]: DEBUG oslo.service.loopingcall [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.246246] env[62503]: DEBUG nova.compute.manager [-] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 653.246246] env[62503]: DEBUG nova.network.neutron [-] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.271214] env[62503]: DEBUG nova.network.neutron [-] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.454900] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Successfully created port: 25850fde-64a4-4bf1-8627-3edfdbe50acd {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.519141] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 653.774050] env[62503]: DEBUG nova.network.neutron [-] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.826114] env[62503]: DEBUG nova.compute.manager [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Received event network-changed-875038d9-e4a3-44de-82ca-eaa4a28ed28c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 653.826309] env[62503]: DEBUG nova.compute.manager [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Refreshing instance network info cache due to event network-changed-875038d9-e4a3-44de-82ca-eaa4a28ed28c. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 653.826534] env[62503]: DEBUG oslo_concurrency.lockutils [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] Acquiring lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.826678] env[62503]: DEBUG oslo_concurrency.lockutils [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] Acquired lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.826837] env[62503]: DEBUG nova.network.neutron [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Refreshing network info cache for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.863831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquiring lock "ff56659a-18f8-44c5-ab10-872e636a9357" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.863831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock 
"ff56659a-18f8-44c5-ab10-872e636a9357" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.018381] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cced8c83-d6b4-4f01-a754-0ee6ea5f7a23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.026631] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06355020-03d7-4718-8093-3f5ece857698 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.061642] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d941151a-13a6-458f-9e6a-430662eb82f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.069540] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd67a58-be14-49f8-a09d-7bfb86c4956b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.086378] env[62503]: DEBUG nova.compute.provider_tree [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.276598] env[62503]: INFO nova.compute.manager [-] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Took 1.03 seconds to deallocate network for instance. [ 654.279227] env[62503]: DEBUG nova.compute.claims [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 654.279420] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.364759] env[62503]: DEBUG nova.network.neutron [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.473451] env[62503]: DEBUG nova.network.neutron [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.564190] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 654.590223] env[62503]: DEBUG nova.scheduler.client.report [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 654.606372] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.606630] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.606787] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.606967] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 654.607215] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.607342] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.608219] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.608541] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.608743] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.608912] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.609262] env[62503]: DEBUG nova.virt.hardware [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.610648] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309a743f-445e-4c57-acdd-e91df769d4bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.625213] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d36231-8cf9-4115-9e1d-457638d53a5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.652410] env[62503]: ERROR nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. 
[ 654.652410] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 654.652410] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.652410] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.652410] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.652410] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.652410] env[62503]: ERROR nova.compute.manager raise self.value [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.652410] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 654.652410] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.652410] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 654.652912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.652912] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 654.652912] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. 
[ 654.652912] env[62503]: ERROR nova.compute.manager [ 654.652912] env[62503]: Traceback (most recent call last): [ 654.652912] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 654.652912] env[62503]: listener.cb(fileno) [ 654.652912] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.652912] env[62503]: result = function(*args, **kwargs) [ 654.652912] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.652912] env[62503]: return func(*args, **kwargs) [ 654.652912] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 654.652912] env[62503]: raise e [ 654.652912] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 654.652912] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 654.652912] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.652912] env[62503]: created_port_ids = self._update_ports_for_instance( [ 654.652912] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.652912] env[62503]: with excutils.save_and_reraise_exception(): [ 654.652912] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.652912] env[62503]: self.force_reraise() [ 654.652912] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.652912] env[62503]: raise self.value [ 654.652912] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.652912] env[62503]: updated_port = self._update_port( [ 654.652912] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.652912] env[62503]: _ensure_no_port_binding_failure(port) [ 654.652912] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.652912] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 654.653745] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. [ 654.653745] env[62503]: Removing descriptor: 16 [ 654.653745] env[62503]: ERROR nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. 
[ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Traceback (most recent call last): [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] yield resources [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.driver.spawn(context, instance, image_meta, [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.653745] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] vm_ref = self.build_virtual_machine(instance, [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] for vif in network_info: [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return self._sync_wrapper(fn, *args, **kwargs) [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.wait() [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self[:] = self._gt.wait() [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return self._exit_event.wait() [ 654.654061] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.654368] env[62503]: ERROR 
nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] result = hub.switch() [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return self.greenlet.switch() [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] result = function(*args, **kwargs) [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return func(*args, **kwargs) [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise e [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] nwinfo = self.network_api.allocate_for_instance( [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.654368] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] created_port_ids = self._update_ports_for_instance( [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] with excutils.save_and_reraise_exception(): [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.force_reraise() [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise self.value [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] updated_port = self._update_port( [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.654849] 
env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] _ensure_no_port_binding_failure(port) [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.654849] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise exception.PortBindingFailed(port_id=port['id']) [ 654.655137] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. [ 654.655137] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] [ 654.655137] env[62503]: INFO nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Terminating instance [ 654.656241] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.656454] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquired lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.656565] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.974056] env[62503]: DEBUG oslo_concurrency.lockutils [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] Releasing lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.974356] env[62503]: DEBUG nova.compute.manager [req-cb38a129-37ee-4a3e-83e7-05d99f3e4b04 req-606ae295-3961-4975-ab11-4e4bd97cba1a service nova] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Received event network-vif-deleted-875038d9-e4a3-44de-82ca-eaa4a28ed28c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 655.099567] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.100121] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] 
[instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 655.102908] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.287s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.103116] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.103290] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 655.103554] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.402s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.105044] env[62503]: INFO nova.compute.claims [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.108600] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93433815-8642-4243-a9d0-4bf424aa6459 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.116915] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49594401-5d2a-4113-9f91-63923c31e265 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.130561] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f3a12e-6c87-46eb-98b6-4c196b88b135 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.137709] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febfef1a-5ec1-420a-a996-73875b5e3653 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.170041] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181237MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 655.170198] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.186598] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.242813] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.612209] env[62503]: DEBUG nova.compute.utils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.613798] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 655.613798] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.663204] env[62503]: DEBUG nova.policy [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73f24827427c46f6a77b1bec996a1355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf42b935c4ad41daa4677f545933c91d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 655.745023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Releasing lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.745488] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 655.745759] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.745977] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f185455-4e54-4bad-8021-60b998e87639 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.757055] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81e9f08-d074-4e6c-8c73-8ea6cc3dc2ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.787840] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c32f170d-1e88-4716-a02a-b8db6896e900 could not be found. [ 655.788193] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 655.788551] env[62503]: INFO nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Took 0.04 seconds to destroy the instance on the hypervisor. [ 655.788868] env[62503]: DEBUG oslo.service.loopingcall [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.789197] env[62503]: DEBUG nova.compute.manager [-] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 655.789340] env[62503]: DEBUG nova.network.neutron [-] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 655.805518] env[62503]: DEBUG nova.network.neutron [-] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.850371] env[62503]: DEBUG nova.compute.manager [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Received event network-changed-25850fde-64a4-4bf1-8627-3edfdbe50acd {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 655.850570] env[62503]: DEBUG nova.compute.manager [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Refreshing instance network info cache due to event network-changed-25850fde-64a4-4bf1-8627-3edfdbe50acd. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 655.850781] env[62503]: DEBUG oslo_concurrency.lockutils [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] Acquiring lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.850925] env[62503]: DEBUG oslo_concurrency.lockutils [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] Acquired lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.851099] env[62503]: DEBUG nova.network.neutron [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Refreshing network info cache for port 25850fde-64a4-4bf1-8627-3edfdbe50acd {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.962040] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Successfully created port: ccf42a2d-595c-4892-aaea-44d57bac1bfb {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.118491] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 656.310331] env[62503]: DEBUG nova.network.neutron [-] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.379935] env[62503]: DEBUG nova.network.neutron [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.446512] env[62503]: DEBUG nova.network.neutron [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.524476] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d9edad-49eb-441d-aa8e-dbe600823fb4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.533146] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8c2a1e-3578-464d-ad3a-5b7423a32bb6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.562735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19bf4f9-d948-4b63-aa1f-84ed7066b2d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.569979] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5705e533-f69a-45d7-b98c-0820def04d9f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.583131] env[62503]: DEBUG nova.compute.provider_tree [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.815986] env[62503]: INFO nova.compute.manager [-] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Took 1.03 seconds to deallocate network for instance. [ 656.818587] env[62503]: DEBUG nova.compute.claims [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 656.818779] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.948723] env[62503]: ERROR nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. 
[ 656.948723] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 656.948723] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 656.948723] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 656.948723] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.948723] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.948723] env[62503]: ERROR nova.compute.manager raise self.value [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 656.948723] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 656.948723] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.948723] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 656.949139] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.949139] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 656.949139] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. 
[ 656.949139] env[62503]: ERROR nova.compute.manager [ 656.949139] env[62503]: Traceback (most recent call last): [ 656.949139] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 656.949139] env[62503]: listener.cb(fileno) [ 656.949139] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.949139] env[62503]: result = function(*args, **kwargs) [ 656.949139] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 656.949139] env[62503]: return func(*args, **kwargs) [ 656.949139] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 656.949139] env[62503]: raise e [ 656.949139] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 656.949139] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 656.949139] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 656.949139] env[62503]: created_port_ids = self._update_ports_for_instance( [ 656.949139] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 656.949139] env[62503]: with excutils.save_and_reraise_exception(): [ 656.949139] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.949139] env[62503]: self.force_reraise() [ 656.949139] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.949139] env[62503]: raise self.value [ 656.949139] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 656.949139] env[62503]: updated_port = self._update_port( [ 656.949139] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.949139] env[62503]: _ensure_no_port_binding_failure(port) [ 656.949139] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.949139] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 656.949786] env[62503]: nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. 
[ 656.949786] env[62503]: Removing descriptor: 16 [ 656.949786] env[62503]: DEBUG oslo_concurrency.lockutils [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] Releasing lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.949786] env[62503]: DEBUG nova.compute.manager [req-21e08d3c-0c7b-487b-9709-4d32de2c0afa req-c7e34369-1d48-4473-9dc5-da04302cc4b9 service nova] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Received event network-vif-deleted-25850fde-64a4-4bf1-8627-3edfdbe50acd {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 657.086158] env[62503]: DEBUG nova.scheduler.client.report [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 657.129569] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 657.155187] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.155454] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.155622] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.155808] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.155953] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.156112] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.156316] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.156475] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 657.156644] env[62503]: DEBUG nova.virt.hardware [None 
req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.156805] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.157014] env[62503]: DEBUG nova.virt.hardware [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.157909] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ca58fd-9329-40f3-bb6d-23207d14bf15 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.169524] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009a37a8-5d8b-41c1-a01b-0819c3bd0c61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.183165] env[62503]: ERROR nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. 
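(Aside on the nova.virt.hardware entries above: with no flavor or image CPU limits (the logged "0:0:0"), the maximums default to 65536 each, and for a 1-vCPU flavor the only candidate topology is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". A rough, illustrative sketch of that enumeration under those assumptions; the real logic in nova/virt/hardware.py also honours preferred orderings and image properties.)

    # Simplified enumeration mirroring the hardware.py log entries above:
    # list (sockets, cores, threads) triples whose product equals the vCPU
    # count and that stay within the (defaulted) maximums. Illustrative only,
    # not a copy of Nova's _get_possible_cpu_topologies.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"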
[ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Traceback (most recent call last): [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] yield resources [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.driver.spawn(context, instance, image_meta, [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] vm_ref = self.build_virtual_machine(instance, [ 657.183165] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] for vif in network_info: [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return self._sync_wrapper(fn, *args, **kwargs) [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.wait() [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self[:] = self._gt.wait() [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return self._exit_event.wait() [ 657.183486] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 657.183486] env[62503]: ERROR 
nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] current.throw(*self._exc) [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] result = function(*args, **kwargs) [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return func(*args, **kwargs) [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise e [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] nwinfo = self.network_api.allocate_for_instance( [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] created_port_ids = self._update_ports_for_instance( [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] with excutils.save_and_reraise_exception(): [ 657.183846] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.force_reraise() [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise self.value [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] updated_port = self._update_port( [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] _ensure_no_port_binding_failure(port) [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise exception.PortBindingFailed(port_id=port['id']) [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. [ 657.184229] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] [ 657.184229] env[62503]: INFO nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Terminating instance [ 657.185331] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquiring lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.185519] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquired lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.185692] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.591392] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.591785] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 657.596391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.944s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.597917] env[62503]: INFO nova.compute.claims [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.706899] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.810900] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.877492] env[62503]: DEBUG nova.compute.manager [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Received event network-changed-ccf42a2d-595c-4892-aaea-44d57bac1bfb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 657.878208] env[62503]: DEBUG nova.compute.manager [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Refreshing instance network info cache due to event network-changed-ccf42a2d-595c-4892-aaea-44d57bac1bfb. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 657.878465] env[62503]: DEBUG oslo_concurrency.lockutils [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] Acquiring lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.105386] env[62503]: DEBUG nova.compute.utils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.107187] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 658.107187] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.148385] env[62503]: DEBUG nova.policy [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2384e124b0414fdcba5c108d1458d1f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e119e5f30e734102a79e19c6b094200e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.316879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Releasing lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.316879] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 658.316879] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 658.316879] env[62503]: DEBUG oslo_concurrency.lockutils [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] Acquired lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.316879] env[62503]: DEBUG nova.network.neutron [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Refreshing network info cache for port ccf42a2d-595c-4892-aaea-44d57bac1bfb {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.317147] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e61bc0be-a147-495b-9a23-6e40998c5bf0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.326573] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e23fce-4c79-4f1e-b830-1a974da9a2ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.348442] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e49252e3-11cc-49c3-b959-24ad87ad48c9 could not be found. [ 658.348649] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.348830] env[62503]: INFO nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 658.349081] env[62503]: DEBUG oslo.service.loopingcall [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.349303] env[62503]: DEBUG nova.compute.manager [-] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 658.349395] env[62503]: DEBUG nova.network.neutron [-] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 658.367737] env[62503]: DEBUG nova.network.neutron [-] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.445119] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Successfully created port: 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.610999] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 658.838721] env[62503]: DEBUG nova.network.neutron [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.870918] env[62503]: DEBUG nova.network.neutron [-] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.933747] env[62503]: DEBUG nova.network.neutron [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.999620] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d9498e-41d6-4e0a-a2d7-b24abd3e4966 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.008321] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83eef23-2e64-4135-a523-7a54d0fed10c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.040223] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8979029-0b09-4c55-840c-ea12a4a40ae8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.047467] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07386dd-ef7e-4a50-8ed0-8324b48e4780 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.060441] env[62503]: DEBUG nova.compute.provider_tree [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.117542] env[62503]: INFO nova.virt.block_device [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Booting with volume 47ea7dbd-681f-4879-a669-091606d1ee4c at /dev/sda [ 659.165610] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99d4bc01-cb46-415d-a033-36599df877c8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.174553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15ac622-6152-4e88-a424-36ccac9a7fa0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.200083] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a6fb7ef-1f06-4094-8e15-824fbe80a9b0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.207503] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60604b15-4db3-44c9-95bd-9c49533bfd68 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.229144] 
env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd50052-8eba-4a7a-b274-6fa9df66eef1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.235319] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16203269-d866-4f68-91ad-672dc1ec87e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.249349] env[62503]: DEBUG nova.virt.block_device [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating existing volume attachment record: 1afd413b-3c16-4fd0-9f26-59fb3818e194 {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 659.377596] env[62503]: INFO nova.compute.manager [-] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Took 1.03 seconds to deallocate network for instance. [ 659.380010] env[62503]: DEBUG nova.compute.claims [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 659.380460] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.436069] env[62503]: DEBUG oslo_concurrency.lockutils [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] Releasing lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.436350] env[62503]: DEBUG nova.compute.manager [req-d821cfc4-8886-4f9a-9117-0f22e81dfeb3 req-b1054825-652c-48af-ac82-87a97689ce0c service nova] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Received event network-vif-deleted-ccf42a2d-595c-4892-aaea-44d57bac1bfb {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 659.535369] env[62503]: ERROR nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. 
[ 659.535369] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 659.535369] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.535369] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.535369] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.535369] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.535369] env[62503]: ERROR nova.compute.manager raise self.value [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.535369] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 659.535369] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.535369] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 659.536114] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.536114] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 659.536114] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. 
[ 659.536114] env[62503]: ERROR nova.compute.manager [ 659.536114] env[62503]: Traceback (most recent call last): [ 659.536114] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 659.536114] env[62503]: listener.cb(fileno) [ 659.536114] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.536114] env[62503]: result = function(*args, **kwargs) [ 659.536114] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.536114] env[62503]: return func(*args, **kwargs) [ 659.536114] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 659.536114] env[62503]: raise e [ 659.536114] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 659.536114] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 659.536114] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.536114] env[62503]: created_port_ids = self._update_ports_for_instance( [ 659.536114] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.536114] env[62503]: with excutils.save_and_reraise_exception(): [ 659.536114] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.536114] env[62503]: self.force_reraise() [ 659.536114] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.536114] env[62503]: raise self.value [ 659.536114] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.536114] env[62503]: updated_port = self._update_port( [ 659.536114] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.536114] env[62503]: _ensure_no_port_binding_failure(port) [ 659.536114] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.536114] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 659.537178] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. 
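(Aside: the bottom frame of both tracebacks above is the guard in nova/network/neutron.py that turns a port Neutron could not bind into PortBindingFailed. A minimal sketch of that check, assuming the port dict returned by Neutron carries the standard binding:vif_type field; the class and helper below are illustrative stand-ins named after the code paths in the traceback, not a verified copy of the source.)

    # Sketch of the guard at the bottom of the traceback
    # (_ensure_no_port_binding_failure). Assumes a Neutron port dict with
    # 'binding:vif_type' and 'id' keys; 'binding_failed' is the value Neutron
    # reports when no mechanism driver could bind the port.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: the port updated at neutron.py:585 comes back unbound.
    ensure_no_port_binding_failure(
        {'id': '3298a0ff-80dc-479b-b87e-5cc3c0b3d16e',
         'binding:vif_type': 'binding_failed'})   # raises PortBindingFailed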
[ 659.537178] env[62503]: Removing descriptor: 16 [ 659.566806] env[62503]: DEBUG nova.scheduler.client.report [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 659.904698] env[62503]: DEBUG nova.compute.manager [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Received event network-changed-3298a0ff-80dc-479b-b87e-5cc3c0b3d16e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 659.904908] env[62503]: DEBUG nova.compute.manager [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Refreshing instance network info cache due to event network-changed-3298a0ff-80dc-479b-b87e-5cc3c0b3d16e. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 659.905145] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] Acquiring lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.905292] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] Acquired lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.905633] env[62503]: DEBUG nova.network.neutron [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Refreshing network info cache for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.075529] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.076073] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 660.078870] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.805s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.424545] env[62503]: DEBUG nova.network.neutron [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.519201] env[62503]: DEBUG nova.network.neutron [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.585512] env[62503]: DEBUG nova.compute.utils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.590057] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 660.590235] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.631914] env[62503]: DEBUG nova.policy [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd50229f835b5492e9ade7d1deb12cf1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eabfbceb13fa4254a63e5c69490241a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 660.958863] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a121459-405c-411b-b726-d9a8f7c886b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.966580] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7257bc2-1fb7-4c24-9ca6-29879f61e800 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.996502] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Successfully created port: 18f34f69-8a6c-4c87-af89-d119386b3019 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.998656] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23046093-6c84-407e-b697-9318dd76e281 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.005950] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f281d0c2-d4a0-4df6-9b81-ab80c1598979 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.019945] env[62503]: DEBUG nova.compute.provider_tree [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.021165] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] Releasing lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.021358] env[62503]: DEBUG nova.compute.manager [req-bc15616e-a7d9-45ff-8a67-b4ba74114c5e req-8209d84d-c992-4878-8072-1aefab7d6471 service nova] [instance: 
5ca1a33c-7324-481c-95cd-3761ce8ccf13] Received event network-vif-deleted-3298a0ff-80dc-479b-b87e-5cc3c0b3d16e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 661.091521] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 661.340175] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 661.340938] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 661.341210] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 661.341442] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.341653] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 661.341797] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 661.341958] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 661.343071] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 
tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 661.343071] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 661.343071] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 661.343071] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 661.343071] env[62503]: DEBUG nova.virt.hardware [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 661.344161] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58f9b4a-d344-47c9-8b81-31d9170826aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.352819] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62693a4a-dbfd-4452-98fd-ac3bdad30123 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.367075] env[62503]: ERROR nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. 
[ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Traceback (most recent call last): [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] yield resources [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.driver.spawn(context, instance, image_meta, [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] vm_ref = self.build_virtual_machine(instance, [ 661.367075] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] for vif in network_info: [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return self._sync_wrapper(fn, *args, **kwargs) [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.wait() [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self[:] = self._gt.wait() [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return self._exit_event.wait() [ 661.367621] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 661.367621] env[62503]: ERROR 
nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] current.throw(*self._exc) [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] result = function(*args, **kwargs) [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return func(*args, **kwargs) [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise e [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] nwinfo = self.network_api.allocate_for_instance( [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] created_port_ids = self._update_ports_for_instance( [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] with excutils.save_and_reraise_exception(): [ 661.368164] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.force_reraise() [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise self.value [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] updated_port = self._update_port( [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] _ensure_no_port_binding_failure(port) [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise exception.PortBindingFailed(port_id=port['id']) [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. [ 661.368702] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] [ 661.368702] env[62503]: INFO nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Terminating instance [ 661.369445] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquiring lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.369603] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquired lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.369768] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.524017] env[62503]: DEBUG nova.scheduler.client.report [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 661.887956] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.925320] env[62503]: ERROR nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. 
[ 661.925320] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 661.925320] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 661.925320] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 661.925320] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.925320] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.925320] env[62503]: ERROR nova.compute.manager raise self.value [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 661.925320] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 661.925320] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.925320] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 661.925972] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.925972] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 661.925972] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. 
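Each of these tracebacks also passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ / force_reraise frames): the context manager Nova uses to run rollback work while still propagating the original error. A hedged usage sketch follows, assuming the oslo.utils package shown in the traceback is installed; update_port and the rollback print are placeholders, not Nova code.

    # Usage pattern of save_and_reraise_exception as seen in the frames above:
    # do cleanup on failure, then re-raise the original exception.
    from oslo_utils import excutils


    def update_port(port_id):
        # Stand-in for the Neutron port update that fails in this log.
        raise RuntimeError(f"binding failed for {port_id}")


    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                update_port(port_id)
                created.append(port_id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; when the block exits the saved
                    # exception is re-raised (ctxt.reraise = False would
                    # swallow it instead).
                    print(f"rolling back already-created ports: {created}")


    try:
        update_ports(['18f34f69-8a6c-4c87-af89-d119386b3019'])
    except RuntimeError as exc:
        print("re-raised:", exc)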
[ 661.925972] env[62503]: ERROR nova.compute.manager [ 661.925972] env[62503]: Traceback (most recent call last): [ 661.925972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 661.925972] env[62503]: listener.cb(fileno) [ 661.925972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 661.925972] env[62503]: result = function(*args, **kwargs) [ 661.925972] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 661.925972] env[62503]: return func(*args, **kwargs) [ 661.925972] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 661.925972] env[62503]: raise e [ 661.925972] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 661.925972] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 661.925972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 661.925972] env[62503]: created_port_ids = self._update_ports_for_instance( [ 661.925972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 661.925972] env[62503]: with excutils.save_and_reraise_exception(): [ 661.925972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.925972] env[62503]: self.force_reraise() [ 661.925972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.925972] env[62503]: raise self.value [ 661.925972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 661.925972] env[62503]: updated_port = self._update_port( [ 661.925972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.925972] env[62503]: _ensure_no_port_binding_failure(port) [ 661.925972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.925972] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 661.926980] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. [ 661.926980] env[62503]: Removing descriptor: 16 [ 661.949610] env[62503]: DEBUG nova.compute.manager [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Received event network-changed-18f34f69-8a6c-4c87-af89-d119386b3019 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 661.949610] env[62503]: DEBUG nova.compute.manager [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Refreshing instance network info cache due to event network-changed-18f34f69-8a6c-4c87-af89-d119386b3019. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 661.949880] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] Acquiring lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.949880] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] Acquired lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.949934] env[62503]: DEBUG nova.network.neutron [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Refreshing network info cache for port 18f34f69-8a6c-4c87-af89-d119386b3019 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.978511] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.027082] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.948s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.027774] env[62503]: ERROR nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. 
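The Acquiring/Acquired/Releasing lock lines, with their waited/held timings (e.g. "held 1.948s" above), come from oslo_concurrency.lockutils. The sketch below shows the two usual forms, the synchronized decorator and the lock() context manager, assuming oslo.concurrency is installed; the lock names mirror the log and the function bodies are placeholders.

    # The lock acquire/release lines above come from oslo_concurrency.lockutils.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Serialized against every other function synchronized on
        # 'compute_resources' in this process (e.g. abort_instance_claim).
        print("claim held")


    def refresh_network_cache(instance_uuid):
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            # Only one greenthread rebuilds this instance's network info
            # cache at a time.
            print("cache refreshed")


    instance_claim()
    refresh_network_cache('73142a3f-3be8-4956-90f8-6ca223d2d01f')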
[ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Traceback (most recent call last): [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.driver.spawn(context, instance, image_meta, [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] vm_ref = self.build_virtual_machine(instance, [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.027774] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] for vif in network_info: [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self._sync_wrapper(fn, *args, **kwargs) [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.wait() [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self[:] = self._gt.wait() [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self._exit_event.wait() [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] result = hub.switch() [ 662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
662.028240] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return self.greenlet.switch() [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] result = function(*args, **kwargs) [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] return func(*args, **kwargs) [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise e [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] nwinfo = self.network_api.allocate_for_instance( [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] created_port_ids = self._update_ports_for_instance( [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] with excutils.save_and_reraise_exception(): [ 662.028659] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] self.force_reraise() [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise self.value [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] updated_port = self._update_port( [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] _ensure_no_port_binding_failure(port) [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] raise exception.PortBindingFailed(port_id=port['id']) [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] nova.exception.PortBindingFailed: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. [ 662.029037] env[62503]: ERROR nova.compute.manager [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] [ 662.029592] env[62503]: DEBUG nova.compute.utils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 662.029841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.555s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.031581] env[62503]: INFO nova.compute.claims [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.034216] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Build of instance 73142a3f-3be8-4956-90f8-6ca223d2d01f was re-scheduled: Binding failed for port 95cc94d9-2d68-4482-b2e3-5457ec2518c9, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 662.034649] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 662.035028] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquiring lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.035209] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Acquired lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.035396] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.102537] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 662.128042] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.128042] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.128042] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.128241] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.128241] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.128241] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.128341] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.128522] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.128694] env[62503]: DEBUG nova.virt.hardware [None 
req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.128855] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.129047] env[62503]: DEBUG nova.virt.hardware [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.129969] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c607ecb-36f7-4999-b886-03c1947d6cf0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.139138] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006bd3bd-60fd-4b2d-b676-fbbaba0b83d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.153218] env[62503]: ERROR nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. 
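The nova.virt.hardware lines above step through CPU topology selection for the m1.nano flavor: 1 vCPU with no flavor or image limits (the 0:0:0 preferences fall back to the 65536 maxima), so exactly one topology, sockets=1, cores=1, threads=1, is possible. The snippet below illustrates that enumeration arithmetic only; it is not the actual nova.virt.hardware implementation.

    # Illustration of the topology enumeration logged above: for 1 vCPU with
    # the default 65536 maxima the only candidate is (1, 1, 1).
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)


    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]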
[ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Traceback (most recent call last): [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] yield resources [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.driver.spawn(context, instance, image_meta, [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] vm_ref = self.build_virtual_machine(instance, [ 662.153218] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] for vif in network_info: [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return self._sync_wrapper(fn, *args, **kwargs) [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.wait() [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self[:] = self._gt.wait() [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return self._exit_event.wait() [ 662.153549] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.153549] env[62503]: ERROR 
nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] current.throw(*self._exc) [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] result = function(*args, **kwargs) [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return func(*args, **kwargs) [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise e [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] nwinfo = self.network_api.allocate_for_instance( [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] created_port_ids = self._update_ports_for_instance( [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] with excutils.save_and_reraise_exception(): [ 662.153850] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.force_reraise() [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise self.value [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] updated_port = self._update_port( [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] _ensure_no_port_binding_failure(port) [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise exception.PortBindingFailed(port_id=port['id']) [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. [ 662.154519] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] [ 662.154519] env[62503]: INFO nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Terminating instance [ 662.157833] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.470876] env[62503]: DEBUG nova.network.neutron [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.481029] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Releasing lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.481792] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 662.482224] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fc0a7a7-4833-46ea-ad5c-72520b69d8cd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.493868] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dcafcc-691f-4c41-b6bf-1b8a48562d33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.526136] env[62503]: WARNING nova.virt.vmwareapi.driver [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 5ca1a33c-7324-481c-95cd-3761ce8ccf13 could not be found. 
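The WARNING just above shows the destroy path tolerating a VM that was never created on the backend: the lookup raises InstanceNotFound, the driver logs it and carries on with datastore and network cleanup instead of failing the teardown. A rough sketch of that idempotent-teardown shape follows; the exception class and helpers are stand-ins, not the nova.virt.vmwareapi code.

    # Sketch of the idempotent teardown pattern visible in the warnings above:
    # a missing backend VM is treated as already destroyed and cleanup goes on.

    class InstanceNotFound(Exception):
        pass


    def find_vm(instance_uuid):
        # Spawn failed before a VM was ever created, so lookup fails.
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


    def destroy(instance_uuid):
        try:
            vm_ref = find_vm(instance_uuid)
            print("powering off and deleting", vm_ref)
        except InstanceNotFound as exc:
            print(f"WARNING: {exc} Proceeding with cleanup.")
        # Cleanup that must happen either way (datastore files, network, claims).
        print("deallocating network for", instance_uuid)


    destroy('5ca1a33c-7324-481c-95cd-3761ce8ccf13')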
[ 662.526429] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 662.526803] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74d60ed6-5b4d-4b51-b279-4920eb3b112c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.537569] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e86a3b-f884-47e1-ab03-3645d8d6ef79 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.559160] env[62503]: DEBUG nova.network.neutron [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.573847] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ca1a33c-7324-481c-95cd-3761ce8ccf13 could not be found. [ 662.574067] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.574250] env[62503]: INFO nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Took 0.09 seconds to destroy the instance on the hypervisor. [ 662.574480] env[62503]: DEBUG oslo.service.loopingcall [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.575172] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.576702] env[62503]: DEBUG nova.compute.manager [-] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 662.576801] env[62503]: DEBUG nova.network.neutron [-] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.590498] env[62503]: DEBUG nova.network.neutron [-] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.623604] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.062176] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a1a5936-e336-4e70-8beb-c7d9439021b2 req-b6d81a2f-9a7c-4008-9b1f-0ba440b87e8b service nova] Releasing lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.062563] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquired lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.062745] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.093169] env[62503]: DEBUG nova.network.neutron [-] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.126051] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Releasing lock "refresh_cache-73142a3f-3be8-4956-90f8-6ca223d2d01f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.126282] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 663.126475] env[62503]: DEBUG nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 663.126603] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.143369] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.453152] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d7631d-9b5c-467c-8406-95b50696866e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.460944] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46406dc5-6998-4fe7-9830-06f50d367022 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.489924] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a94241f-e63e-44cc-bc94-18b6da80f1db {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.497066] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63565cdf-e55a-4a6a-bc48-16acf59db57a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.510922] env[62503]: DEBUG nova.compute.provider_tree [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.581215] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.596068] env[62503]: INFO nova.compute.manager [-] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Took 1.02 seconds to deallocate network for instance. 
[ 663.648796] env[62503]: DEBUG nova.network.neutron [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.651526] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.982964] env[62503]: DEBUG nova.compute.manager [req-2b7ccc7a-3fdd-48e0-b09a-7e227198557f req-1b6fb3b4-cb77-416d-9d7e-afd2d9d44e5e service nova] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Received event network-vif-deleted-18f34f69-8a6c-4c87-af89-d119386b3019 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 664.014287] env[62503]: DEBUG nova.scheduler.client.report [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 664.148268] env[62503]: INFO nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Took 0.55 seconds to detach 1 volumes for instance. [ 664.150414] env[62503]: DEBUG nova.compute.claims [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 664.150595] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.151055] env[62503]: INFO nova.compute.manager [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] [instance: 73142a3f-3be8-4956-90f8-6ca223d2d01f] Took 1.02 seconds to deallocate network for instance. 
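The inventory dictionaries reported for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 carry the usual placement capacity math: schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Applied to the logged numbers that gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. The worked example below just replays that arithmetic; it is not code from the scheduler report client.

    # Placement capacity math for the inventory logged above:
    # capacity = (total - reserved) * allocation_ratio, max_unit bounds one allocation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 176},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable {capacity:g}, per-allocation cap {inv['max_unit']}")

    # VCPU: schedulable 192, per-allocation cap 16
    # MEMORY_MB: schedulable 196078, per-allocation cap 65530
    # DISK_GB: schedulable 400, per-allocation cap 176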
[ 664.153489] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Releasing lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.153908] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 664.154111] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.154535] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed611dee-0493-424b-bbce-8afe1ec395a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.163229] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5706b419-4626-4314-9529-c4c26a6a1217 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.186439] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2aa7880f-de24-4f32-b027-731a2030f987 could not be found. [ 664.186625] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.186809] env[62503]: INFO nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Took 0.03 seconds to destroy the instance on the hypervisor. [ 664.187134] env[62503]: DEBUG oslo.service.loopingcall [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.187992] env[62503]: DEBUG nova.compute.manager [-] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 664.188115] env[62503]: DEBUG nova.network.neutron [-] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.202709] env[62503]: DEBUG nova.network.neutron [-] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.519671] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.520259] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 664.523487] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.251s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.526313] env[62503]: INFO nova.compute.claims [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.704977] env[62503]: DEBUG nova.network.neutron [-] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.032562] env[62503]: DEBUG nova.compute.utils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 665.033886] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 665.034071] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.093264] env[62503]: DEBUG nova.policy [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '858793651d2f47148e2e3485f96d28d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c8b0006ec874fccae43f59e907fa6ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 665.182645] env[62503]: INFO nova.scheduler.client.report [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Deleted allocations for instance 73142a3f-3be8-4956-90f8-6ca223d2d01f [ 665.207703] env[62503]: INFO nova.compute.manager [-] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Took 1.02 seconds to deallocate network for instance. [ 665.209695] env[62503]: DEBUG nova.compute.claims [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 665.209901] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.419508] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Successfully created port: 16aeed73-bb69-49c1-a84f-c3bcca7abfad {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.540237] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 665.691509] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2f620b9e-0fbe-4cd8-a880-2520c6ad4568 tempest-DeleteServersAdminTestJSON-318637377 tempest-DeleteServersAdminTestJSON-318637377-project-member] Lock "73142a3f-3be8-4956-90f8-6ca223d2d01f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.532s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.986218] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb1f09f-73d1-4468-b225-f438cb126b1c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.996132] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1657eed9-a68b-4ce8-82c6-ae8b82a0dc90 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.023351] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492af382-cf0c-4e5e-b06d-73a5e16ca101 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.030899] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edea826-b3b6-4336-8245-f096fe57b905 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.044171] env[62503]: DEBUG nova.compute.provider_tree [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.194023] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 666.275657] env[62503]: DEBUG nova.compute.manager [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Received event network-changed-16aeed73-bb69-49c1-a84f-c3bcca7abfad {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 666.275836] env[62503]: DEBUG nova.compute.manager [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Refreshing instance network info cache due to event network-changed-16aeed73-bb69-49c1-a84f-c3bcca7abfad. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 666.276065] env[62503]: DEBUG oslo_concurrency.lockutils [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] Acquiring lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.276211] env[62503]: DEBUG oslo_concurrency.lockutils [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] Acquired lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.276366] env[62503]: DEBUG nova.network.neutron [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Refreshing network info cache for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 666.410226] env[62503]: ERROR nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. [ 666.410226] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 666.410226] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.410226] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.410226] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.410226] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.410226] env[62503]: ERROR nova.compute.manager raise self.value [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.410226] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 666.410226] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.410226] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 666.410740] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.410740] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 666.410740] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding 
failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. [ 666.410740] env[62503]: ERROR nova.compute.manager [ 666.410740] env[62503]: Traceback (most recent call last): [ 666.410740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 666.410740] env[62503]: listener.cb(fileno) [ 666.410740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.410740] env[62503]: result = function(*args, **kwargs) [ 666.410740] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.410740] env[62503]: return func(*args, **kwargs) [ 666.410740] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 666.410740] env[62503]: raise e [ 666.410740] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 666.410740] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 666.410740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.410740] env[62503]: created_port_ids = self._update_ports_for_instance( [ 666.410740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.410740] env[62503]: with excutils.save_and_reraise_exception(): [ 666.410740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.410740] env[62503]: self.force_reraise() [ 666.410740] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.410740] env[62503]: raise self.value [ 666.410740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.410740] env[62503]: updated_port = self._update_port( [ 666.410740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.410740] env[62503]: _ensure_no_port_binding_failure(port) [ 666.410740] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.410740] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 666.411555] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. 
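The traceback recorded above (and repeated for the other failed ports later in this log) ends in _ensure_no_port_binding_failure raising nova.exception.PortBindingFailed. A minimal Python sketch of that check follows, assuming a simplified stand-in exception class and a port dict shaped like a Neutron API response; the real Nova and Neutron objects carry many more fields, so this is only an illustration of the pattern the log shows, not the actual nova.network.neutron code.

    # Illustrative sketch only: simplified stand-ins for the objects named in the
    # traceback above, not the actual Nova/Neutron implementations.

    class PortBindingFailed(Exception):
        # Mirrors the message format seen throughout this log.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port that no mechanism driver could bind with
        # binding:vif_type == 'binding_failed'; a compute service that sees
        # this cannot plug the VIF and aborts the build, which is the failure
        # path the traceback above records.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    # Hypothetical port dict shaped like a Neutron response for the failed port.
    failed_port = {
        'id': '16aeed73-bb69-49c1-a84f-c3bcca7abfad',
        'binding:vif_type': 'binding_failed',
    }

    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)

Running the sketch prints the same "Binding failed for port ..., please check neutron logs for more information." message that recurs in this log. The underlying problem is on the Neutron side (the port binding itself failed), which is why the compute manager below only terminates the affected instance and, where applicable, re-schedules the build rather than retrying the spawn locally.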
[ 666.411555] env[62503]: Removing descriptor: 16 [ 666.547348] env[62503]: DEBUG nova.scheduler.client.report [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 666.551411] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 666.576278] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ead61835-98bc-4b00-b2c2-f0d2e9cdd803',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-222670044',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 666.576521] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 666.576675] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.576854] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 666.576998] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.577166] env[62503]: DEBUG nova.virt.hardware [None 
req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 666.577366] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 666.577522] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 666.577720] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 666.577889] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 666.578071] env[62503]: DEBUG nova.virt.hardware [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.578939] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834959a6-0fd9-4c60-9eb1-554e91461441 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.587115] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f99ce65-c4a3-4b16-a234-fcc9d7973b33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.601030] env[62503]: ERROR nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. 
[ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Traceback (most recent call last): [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] yield resources [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.driver.spawn(context, instance, image_meta, [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] vm_ref = self.build_virtual_machine(instance, [ 666.601030] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] for vif in network_info: [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return self._sync_wrapper(fn, *args, **kwargs) [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.wait() [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self[:] = self._gt.wait() [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return self._exit_event.wait() [ 666.601373] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 666.601373] env[62503]: ERROR 
nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] current.throw(*self._exc) [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] result = function(*args, **kwargs) [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return func(*args, **kwargs) [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise e [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] nwinfo = self.network_api.allocate_for_instance( [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] created_port_ids = self._update_ports_for_instance( [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] with excutils.save_and_reraise_exception(): [ 666.601689] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.force_reraise() [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise self.value [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] updated_port = self._update_port( [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] _ensure_no_port_binding_failure(port) [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise exception.PortBindingFailed(port_id=port['id']) [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. [ 666.602100] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] [ 666.602100] env[62503]: INFO nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Terminating instance [ 666.603565] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.720212] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.794956] env[62503]: DEBUG nova.network.neutron [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.860611] env[62503]: DEBUG nova.network.neutron [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.054935] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.055502] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 667.058223] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.816s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.363639] env[62503]: DEBUG oslo_concurrency.lockutils [req-64a687bd-7a7a-46ff-a730-ab3caadf202c req-d60959c2-7b6e-4b89-92d1-721b4b213c0c service nova] Releasing lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.364090] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquired lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.364291] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.563219] env[62503]: DEBUG nova.compute.utils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 667.567875] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 667.567875] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 667.604221] env[62503]: DEBUG nova.policy [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be85ed0a52cc4ff6bae3dfa8a7bcb6de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceab446cb9624596b9015ac0f5ed14ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 667.885281] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.920597] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d092d32-a9c3-4c1e-88ef-0a176f1b281a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.929594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4be1b2-192d-4902-8c8d-ff3f458171e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.963330] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.965735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5585d0e-71d9-4b51-8081-375f8b371b0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.968121] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Successfully created port: 65cd4476-dc4f-47cb-b6bf-f086c9b46323 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.975735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78745199-aa48-429e-9d34-baf23284b43e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.990088] env[62503]: DEBUG nova.compute.provider_tree [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 
tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.070820] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 668.303640] env[62503]: DEBUG nova.compute.manager [req-0bf44ceb-1c6b-4d37-b858-0c8a56f8e9a2 req-ba1ffc93-b335-48d4-9579-f2cdb92810a4 service nova] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Received event network-vif-deleted-16aeed73-bb69-49c1-a84f-c3bcca7abfad {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 668.470772] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Releasing lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.471220] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 668.471412] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.472419] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9e62505-e447-45b2-9d1b-4dbd38e5473e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.484306] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c8e226-e549-4571-a827-08474fe58395 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.498642] env[62503]: DEBUG nova.scheduler.client.report [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 668.517397] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 
tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0af1e65d-ca88-475e-a871-4087bd49cd9d could not be found. [ 668.517710] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.517811] env[62503]: INFO nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 668.518064] env[62503]: DEBUG oslo.service.loopingcall [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.518274] env[62503]: DEBUG nova.compute.manager [-] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 668.518363] env[62503]: DEBUG nova.network.neutron [-] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 668.533819] env[62503]: DEBUG nova.network.neutron [-] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.564204] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquiring lock "529e6f8e-49b9-46a7-a09f-17238522f7bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.565242] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "529e6f8e-49b9-46a7-a09f-17238522f7bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.960495] env[62503]: ERROR nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. 
[ 668.960495] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 668.960495] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.960495] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.960495] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.960495] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.960495] env[62503]: ERROR nova.compute.manager raise self.value [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.960495] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 668.960495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.960495] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 668.960992] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.960992] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 668.960992] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. 
[ 668.960992] env[62503]: ERROR nova.compute.manager [ 668.960992] env[62503]: Traceback (most recent call last): [ 668.960992] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 668.960992] env[62503]: listener.cb(fileno) [ 668.960992] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.960992] env[62503]: result = function(*args, **kwargs) [ 668.960992] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 668.960992] env[62503]: return func(*args, **kwargs) [ 668.960992] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 668.960992] env[62503]: raise e [ 668.960992] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 668.960992] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 668.960992] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.960992] env[62503]: created_port_ids = self._update_ports_for_instance( [ 668.960992] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.960992] env[62503]: with excutils.save_and_reraise_exception(): [ 668.960992] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.960992] env[62503]: self.force_reraise() [ 668.960992] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.960992] env[62503]: raise self.value [ 668.960992] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.960992] env[62503]: updated_port = self._update_port( [ 668.960992] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.960992] env[62503]: _ensure_no_port_binding_failure(port) [ 668.960992] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.960992] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 668.961860] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. [ 668.961860] env[62503]: Removing descriptor: 16 [ 669.004788] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.005444] env[62503]: ERROR nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. 
[ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Traceback (most recent call last): [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.driver.spawn(context, instance, image_meta, [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] vm_ref = self.build_virtual_machine(instance, [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 669.005444] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] for vif in network_info: [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self._sync_wrapper(fn, *args, **kwargs) [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.wait() [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self[:] = self._gt.wait() [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self._exit_event.wait() [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] result = hub.switch() [ 669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
669.005937] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return self.greenlet.switch() [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] result = function(*args, **kwargs) [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] return func(*args, **kwargs) [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise e [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] nwinfo = self.network_api.allocate_for_instance( [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] created_port_ids = self._update_ports_for_instance( [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] with excutils.save_and_reraise_exception(): [ 669.006370] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] self.force_reraise() [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise self.value [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] updated_port = self._update_port( [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] _ensure_no_port_binding_failure(port) [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] raise exception.PortBindingFailed(port_id=port['id']) [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] nova.exception.PortBindingFailed: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. [ 669.006676] env[62503]: ERROR nova.compute.manager [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] [ 669.007011] env[62503]: DEBUG nova.compute.utils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 669.007387] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.721s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.010353] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Build of instance 5a1af72f-71c8-42de-aa71-f011d85210a5 was re-scheduled: Binding failed for port 2e596441-f659-4fc5-bd1e-0075ab21ae3b, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 669.010784] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 669.011008] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquiring lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.011162] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Acquired lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.011315] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.037717] env[62503]: DEBUG nova.network.neutron [-] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.085726] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 669.111148] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 669.111622] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 669.111622] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.111755] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 669.111900] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.112055] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 669.112267] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 669.112426] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 669.112590] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 
tempest-ServersTestJSON-1104182753-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 669.112752] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 669.112918] env[62503]: DEBUG nova.virt.hardware [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.113778] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0aa855-9cb3-4c3b-8e18-03f14089240e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.122009] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c0b26b-a2e2-44fa-91b1-eed85920bd30 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.135414] env[62503]: ERROR nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. 
[ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Traceback (most recent call last): [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] yield resources [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.driver.spawn(context, instance, image_meta, [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] vm_ref = self.build_virtual_machine(instance, [ 669.135414] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] vif_infos = vmwarevif.get_vif_info(self._session, [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] for vif in network_info: [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return self._sync_wrapper(fn, *args, **kwargs) [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.wait() [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self[:] = self._gt.wait() [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return self._exit_event.wait() [ 669.135800] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 669.135800] env[62503]: ERROR 
nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] current.throw(*self._exc) [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] result = function(*args, **kwargs) [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return func(*args, **kwargs) [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise e [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] nwinfo = self.network_api.allocate_for_instance( [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] created_port_ids = self._update_ports_for_instance( [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] with excutils.save_and_reraise_exception(): [ 669.136182] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.force_reraise() [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise self.value [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] updated_port = self._update_port( [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] _ensure_no_port_binding_failure(port) [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise exception.PortBindingFailed(port_id=port['id']) [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. [ 669.136576] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] [ 669.136576] env[62503]: INFO nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Terminating instance [ 669.137847] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquiring lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.138010] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquired lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.138199] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.531286] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.541585] env[62503]: INFO nova.compute.manager [-] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Took 1.02 seconds to deallocate network for instance. 
[ 669.543965] env[62503]: DEBUG nova.compute.claims [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 669.544748] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.617725] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.656821] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.730373] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.845283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26eca81f-c586-4a57-98e3-3ad66306225b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.853972] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7ccce3-dd2b-4782-bb88-5c8763804379 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.884457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7250e171-4e91-4478-ad7c-24cc8c670581 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.891815] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ee9f0a-06a8-4c87-a51a-614beb963110 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.904799] env[62503]: DEBUG nova.compute.provider_tree [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.121051] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 
tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Releasing lock "refresh_cache-5a1af72f-71c8-42de-aa71-f011d85210a5" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.121051] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 670.121412] env[62503]: DEBUG nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 670.121412] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.135247] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.233543] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Releasing lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.233898] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 670.233959] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.234283] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f76ec1d2-b6d5-4b08-a43e-88226d52eb9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.242925] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8da3581-f65b-4c47-9888-5794a5819e94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.264835] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e could not be found. [ 670.265042] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.265230] env[62503]: INFO nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 670.265470] env[62503]: DEBUG oslo.service.loopingcall [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.265702] env[62503]: DEBUG nova.compute.manager [-] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 670.265791] env[62503]: DEBUG nova.network.neutron [-] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.286499] env[62503]: DEBUG nova.network.neutron [-] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.327703] env[62503]: DEBUG nova.compute.manager [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Received event network-changed-65cd4476-dc4f-47cb-b6bf-f086c9b46323 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 670.327857] env[62503]: DEBUG nova.compute.manager [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Refreshing instance network info cache due to event network-changed-65cd4476-dc4f-47cb-b6bf-f086c9b46323. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 670.327952] env[62503]: DEBUG oslo_concurrency.lockutils [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] Acquiring lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.328104] env[62503]: DEBUG oslo_concurrency.lockutils [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] Acquired lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.328264] env[62503]: DEBUG nova.network.neutron [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Refreshing network info cache for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.408406] env[62503]: DEBUG nova.scheduler.client.report [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 670.638661] env[62503]: DEBUG nova.network.neutron [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.789698] env[62503]: DEBUG nova.network.neutron [-] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.847946] env[62503]: DEBUG nova.network.neutron [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.913166] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.913812] env[62503]: ERROR nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Traceback (most recent call last): [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.driver.spawn(context, instance, image_meta, [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] vm_ref = self.build_virtual_machine(instance, [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.913812] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] for vif in network_info: [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self._sync_wrapper(fn, *args, **kwargs) [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.wait() [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 670.914141] env[62503]: ERROR 
nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self[:] = self._gt.wait() [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self._exit_event.wait() [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] result = hub.switch() [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 670.914141] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return self.greenlet.switch() [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] result = function(*args, **kwargs) [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] return func(*args, **kwargs) [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise e [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] nwinfo = self.network_api.allocate_for_instance( [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] created_port_ids = self._update_ports_for_instance( [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] with excutils.save_and_reraise_exception(): [ 670.914453] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] self.force_reraise() [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise self.value [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] updated_port = self._update_port( [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] _ensure_no_port_binding_failure(port) [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] raise exception.PortBindingFailed(port_id=port['id']) [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] nova.exception.PortBindingFailed: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. [ 670.914761] env[62503]: ERROR nova.compute.manager [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] [ 670.915025] env[62503]: DEBUG nova.compute.utils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 670.916256] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Build of instance f0fefd82-1670-4bbb-b250-da0c3b6ca3f6 was re-scheduled: Binding failed for port 73b93393-7850-45ae-977f-e26fb12e6842, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 670.916745] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 670.916963] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.917186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquired lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.917284] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.918330] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.639s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.082844] env[62503]: DEBUG nova.network.neutron [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.141390] env[62503]: INFO nova.compute.manager [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] [instance: 5a1af72f-71c8-42de-aa71-f011d85210a5] Took 1.02 seconds to deallocate network for instance. [ 671.292305] env[62503]: INFO nova.compute.manager [-] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Took 1.03 seconds to deallocate network for instance. 
[ 671.294662] env[62503]: DEBUG nova.compute.claims [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 671.294863] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.439971] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.496752] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.585902] env[62503]: DEBUG oslo_concurrency.lockutils [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] Releasing lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.586174] env[62503]: DEBUG nova.compute.manager [req-4a94a7df-ff7f-49ee-ab30-e6c255a1b47b req-f393aaa3-de08-4fc1-8d67-647572581573 service nova] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Received event network-vif-deleted-65cd4476-dc4f-47cb-b6bf-f086c9b46323 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 671.722427] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a067c751-04fa-4725-9a0b-65f55d1f4ac8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.729997] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02207f66-21e0-4267-b76b-0a9e9d965e56 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.759441] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbe254e-465a-4633-b656-c99c13782770 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.766914] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba30a89b-8fce-4985-8780-ba82047bbfb3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.779808] env[62503]: DEBUG nova.compute.provider_tree [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 
{{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.999531] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Releasing lock "refresh_cache-f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.999780] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 671.999991] env[62503]: DEBUG nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 672.000199] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.014746] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.169951] env[62503]: INFO nova.scheduler.client.report [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Deleted allocations for instance 5a1af72f-71c8-42de-aa71-f011d85210a5 [ 672.282828] env[62503]: DEBUG nova.scheduler.client.report [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 672.518027] env[62503]: DEBUG nova.network.neutron [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.678078] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6b7abc00-006d-4855-9995-538e6edbbf28 tempest-ServersWithSpecificFlavorTestJSON-213993766 tempest-ServersWithSpecificFlavorTestJSON-213993766-project-member] Lock "5a1af72f-71c8-42de-aa71-f011d85210a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.959s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.788074] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.870s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.788754] env[62503]: ERROR nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. 
[ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Traceback (most recent call last): [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.driver.spawn(context, instance, image_meta, [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] vm_ref = self.build_virtual_machine(instance, [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.788754] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] for vif in network_info: [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return self._sync_wrapper(fn, *args, **kwargs) [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.wait() [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self[:] = self._gt.wait() [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return self._exit_event.wait() [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] current.throw(*self._exc) [ 672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
672.789075] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] result = function(*args, **kwargs) [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] return func(*args, **kwargs) [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise e [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] nwinfo = self.network_api.allocate_for_instance( [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] created_port_ids = self._update_ports_for_instance( [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] with excutils.save_and_reraise_exception(): [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] self.force_reraise() [ 672.789436] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise self.value [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] updated_port = self._update_port( [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] _ensure_no_port_binding_failure(port) [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] raise exception.PortBindingFailed(port_id=port['id']) [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] nova.exception.PortBindingFailed: Binding failed for 
port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. [ 672.789811] env[62503]: ERROR nova.compute.manager [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] [ 672.789811] env[62503]: DEBUG nova.compute.utils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.790885] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.620s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.792314] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Build of instance 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4 was re-scheduled: Binding failed for port 875038d9-e4a3-44de-82ca-eaa4a28ed28c, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 672.792727] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 672.792952] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.793218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.793358] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.019547] env[62503]: INFO nova.compute.manager [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: f0fefd82-1670-4bbb-b250-da0c3b6ca3f6] Took 1.02 seconds to deallocate network for instance. [ 673.181101] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 673.321327] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.424368] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.701924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.930166] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.930166] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 673.930166] env[62503]: DEBUG nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 673.930166] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.953812] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.046389] env[62503]: INFO nova.scheduler.client.report [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Deleted allocations for instance f0fefd82-1670-4bbb-b250-da0c3b6ca3f6 [ 674.326476] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance f0fefd82-1670-4bbb-b250-da0c3b6ca3f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.458527] env[62503]: DEBUG nova.network.neutron [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.554753] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fa417a80-55e3-4106-aabc-7d0e3e09907d tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "f0fefd82-1670-4bbb-b250-da0c3b6ca3f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.013s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.829898] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.830151] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c32f170d-1e88-4716-a02a-b8db6896e900 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.830288] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance e49252e3-11cc-49c3-b959-24ad87ad48c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.830414] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 5ca1a33c-7324-481c-95cd-3761ce8ccf13 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.830533] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 2aa7880f-de24-4f32-b027-731a2030f987 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.830650] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0af1e65d-ca88-475e-a871-4087bd49cd9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.831474] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.965071] env[62503]: INFO nova.compute.manager [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4] Took 1.04 seconds to deallocate network for instance. [ 675.058546] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 675.333769] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance f244a7c9-2f39-4f91-aeba-e5f36e7f79ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.580159] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.841722] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.001017] env[62503]: INFO nova.scheduler.client.report [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted allocations for instance 0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4 [ 676.344986] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.507458] env[62503]: DEBUG oslo_concurrency.lockutils [None req-856c8a58-cdb0-4f72-ae8c-c9a975e5795d tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "0acb10f9-3d1e-4621-9b7f-dd7c6b1f86b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.649s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.847986] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance d4930731-7333-426c-a2fc-a732d351a0f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.010167] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 677.350634] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 9bda2d4c-38c0-49ba-9a69-402869ff6a65 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.528391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.854097] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ce8d9b01-e99d-4051-bd96-659692a436da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.962496] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "ad7badc9-cb11-4532-885a-28fb3d4de9ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.962691] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "ad7badc9-cb11-4532-885a-28fb3d4de9ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.356545] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 3178d5cd-1937-422b-9287-970d095aa452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 678.860345] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 97ac40d6-1c29-4282-86e5-be27a20cf5e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.363875] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0b40f385-db0a-460c-b7fd-47e4d6afbaf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.866872] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c1693c1f-6497-429c-a7f7-5bf5591684d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.370489] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance be79632e-78ca-440a-92ef-d86a9f32693e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.874731] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 47d67edd-0860-49a6-ab7e-0511cffb82ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.377915] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 141d7d04-0267-4e15-90ed-112ac8fb8c9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.882515] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance d4990c66-63d5-43b0-8187-2074c99ccde2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.385112] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 48d9b18d-04b5-44e4-809e-383819d39418 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.717193] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "32d4fda5-6d30-4416-b187-cf5548cb23bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.717442] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "32d4fda5-6d30-4416-b187-cf5548cb23bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.888740] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b9259ced-344a-42e5-835d-3713631a68c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.392148] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance cf6fb485-9672-42b5-ac88-bbf5e0941393 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.895215] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ff56659a-18f8-44c5-ab10-872e636a9357 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.398679] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 529e6f8e-49b9-46a7-a09f-17238522f7bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.399029] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 684.399128] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 684.703071] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a7868f-b223-423d-bb8b-e6f7a4104f3a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.710860] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1070d469-8659-42d2-8632-aaa39e9ac29d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.739343] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe525ac-e251-4a18-8d23-3bb27ec94bf8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.746284] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899e41c1-b15a-4f8a-a3d4-53d9c702ff19 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.758837] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 
1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.261405] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 685.768033] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 685.768033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.977s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.768033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.949s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.581704] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66493408-ac9d-4e08-8049-0ef26324f47e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.589283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9374529a-2693-4322-83db-66f79b967510 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.621609] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fdbd44-8b34-43d8-a7ce-12048d592cf4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.629198] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba56621f-0788-42f1-bd99-05c879177d4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.642105] env[62503]: DEBUG nova.compute.provider_tree [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.146808] env[62503]: DEBUG nova.scheduler.client.report [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 
tempest-ListImageFiltersTestJSON-1898919175-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 687.651759] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.652557] env[62503]: ERROR nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Traceback (most recent call last): [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.driver.spawn(context, instance, image_meta, [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] vm_ref = self.build_virtual_machine(instance, [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] vif_infos = vmwarevif.get_vif_info(self._session, [ 687.652557] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] for vif in network_info: [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return 
self._sync_wrapper(fn, *args, **kwargs) [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.wait() [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self[:] = self._gt.wait() [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return self._exit_event.wait() [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] result = hub.switch() [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 687.652908] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return self.greenlet.switch() [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] result = function(*args, **kwargs) [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] return func(*args, **kwargs) [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise e [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] nwinfo = self.network_api.allocate_for_instance( [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] created_port_ids = self._update_ports_for_instance( [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] with 
excutils.save_and_reraise_exception(): [ 687.653264] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] self.force_reraise() [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise self.value [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] updated_port = self._update_port( [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] _ensure_no_port_binding_failure(port) [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] raise exception.PortBindingFailed(port_id=port['id']) [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] nova.exception.PortBindingFailed: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. [ 687.653609] env[62503]: ERROR nova.compute.manager [instance: c32f170d-1e88-4716-a02a-b8db6896e900] [ 687.653915] env[62503]: DEBUG nova.compute.utils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 687.654945] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Build of instance c32f170d-1e88-4716-a02a-b8db6896e900 was re-scheduled: Binding failed for port 25850fde-64a4-4bf1-8627-3edfdbe50acd, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 687.655369] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 687.655607] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquiring lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.655780] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Acquired lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.655973] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.657951] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.278s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.174104] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.222917] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.447836] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b771046b-a529-456f-b2ee-848dc4883d72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.455747] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c772af-de41-4db1-b596-e797b00da4aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.486741] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e57e57-774c-4941-a4d3-af5b93fec4a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.494453] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5e44f5-82d5-4a4b-89b4-9d42c2448085 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.507760] env[62503]: DEBUG nova.compute.provider_tree [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.727158] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Releasing lock "refresh_cache-c32f170d-1e88-4716-a02a-b8db6896e900" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.727422] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 688.727590] env[62503]: DEBUG nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 688.727753] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.744084] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.011283] env[62503]: DEBUG nova.scheduler.client.report [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 689.246863] env[62503]: DEBUG nova.network.neutron [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.516542] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.858s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.517107] env[62503]: ERROR nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. 
[ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Traceback (most recent call last): [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.driver.spawn(context, instance, image_meta, [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] vm_ref = self.build_virtual_machine(instance, [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] vif_infos = vmwarevif.get_vif_info(self._session, [ 689.517107] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] for vif in network_info: [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return self._sync_wrapper(fn, *args, **kwargs) [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.wait() [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self[:] = self._gt.wait() [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return self._exit_event.wait() [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] current.throw(*self._exc) [ 689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
689.517462] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] result = function(*args, **kwargs) [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] return func(*args, **kwargs) [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise e [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] nwinfo = self.network_api.allocate_for_instance( [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] created_port_ids = self._update_ports_for_instance( [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] with excutils.save_and_reraise_exception(): [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] self.force_reraise() [ 689.517770] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise self.value [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] updated_port = self._update_port( [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] _ensure_no_port_binding_failure(port) [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] raise exception.PortBindingFailed(port_id=port['id']) [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] nova.exception.PortBindingFailed: Binding failed for 
port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. [ 689.518218] env[62503]: ERROR nova.compute.manager [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] [ 689.518218] env[62503]: DEBUG nova.compute.utils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 689.519144] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.368s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.522090] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Build of instance e49252e3-11cc-49c3-b959-24ad87ad48c9 was re-scheduled: Binding failed for port ccf42a2d-595c-4892-aaea-44d57bac1bfb, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 689.522516] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 689.522736] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquiring lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.522880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Acquired lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.523055] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.750427] env[62503]: INFO nova.compute.manager [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] [instance: c32f170d-1e88-4716-a02a-b8db6896e900] Took 1.02 seconds to deallocate network for instance. 
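The PortBindingFailed errors captured above for instances c32f170d-1e88-4716-a02a-b8db6896e900 and e49252e3-11cc-49c3-b959-24ad87ad48c9 terminate in the same frame, _ensure_no_port_binding_failure at nova/network/neutron.py:294. The short standalone Python sketch below only approximates that check for readers of this log; it assumes (this is not shown in the log itself) that Neutron reports a failed binding by setting the port's binding:vif_type attribute to 'binding_failed', and the PortBindingFailed class here is a stand-in for nova.exception.PortBindingFailed, with its message copied from the log text.

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed; message mirrors the log text.
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron uses for a failed binding

    def ensure_no_port_binding_failure(port):
        # Approximates the check at nova/network/neutron.py:294 seen in the tracebacks above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port dict shaped like the one that failed for c32f170d:
    port = {'id': '25850fde-64a4-4bf1-8627-3edfdbe50acd',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

When this exception propagates, the compute manager aborts the resource claim, deallocates networking, and re-schedules the build, which matches the sequence of abort_instance_claim, deallocate_for_instance() and "was re-scheduled" messages that follows each traceback in this log.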
[ 690.044825] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.124252] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.333453] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d4973b-e279-4d3f-8e20-1f188cf0b783 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.340710] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6bbb2c-22d8-4bd9-85e3-13e9cba9c42b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.371114] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a81f45b-fa74-499f-a670-b9dfcbd39968 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.378454] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea36ac85-8ffa-4a81-8cef-0f73ad17bad6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.392485] env[62503]: DEBUG nova.compute.provider_tree [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.627372] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Releasing lock "refresh_cache-e49252e3-11cc-49c3-b959-24ad87ad48c9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.627726] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 690.627919] env[62503]: DEBUG nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 690.628141] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 690.641943] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.781262] env[62503]: INFO nova.scheduler.client.report [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Deleted allocations for instance c32f170d-1e88-4716-a02a-b8db6896e900 [ 690.895621] env[62503]: DEBUG nova.scheduler.client.report [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 691.145983] env[62503]: DEBUG nova.network.neutron [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.288794] env[62503]: DEBUG oslo_concurrency.lockutils [None req-812ef84d-18c3-4e01-ae3f-ea5c4ae1494f tempest-ListImageFiltersTestJSON-1898919175 tempest-ListImageFiltersTestJSON-1898919175-project-member] Lock "c32f170d-1e88-4716-a02a-b8db6896e900" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.440s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.400287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.400935] 
env[62503]: ERROR nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Traceback (most recent call last): [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.driver.spawn(context, instance, image_meta, [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] vm_ref = self.build_virtual_machine(instance, [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.400935] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] for vif in network_info: [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return self._sync_wrapper(fn, *args, **kwargs) [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.wait() [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self[:] = self._gt.wait() [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return self._exit_event.wait() [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] current.throw(*self._exc) [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.401317] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] result = function(*args, **kwargs) [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] return func(*args, **kwargs) [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise e [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] nwinfo = self.network_api.allocate_for_instance( [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] created_port_ids = self._update_ports_for_instance( [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] with excutils.save_and_reraise_exception(): [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] self.force_reraise() [ 691.401858] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise self.value [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] updated_port = self._update_port( [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] _ensure_no_port_binding_failure(port) [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 
5ca1a33c-7324-481c-95cd-3761ce8ccf13] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] raise exception.PortBindingFailed(port_id=port['id']) [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] nova.exception.PortBindingFailed: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. [ 691.402784] env[62503]: ERROR nova.compute.manager [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] [ 691.402784] env[62503]: DEBUG nova.compute.utils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 691.403135] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.193s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.406415] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Build of instance 5ca1a33c-7324-481c-95cd-3761ce8ccf13 was re-scheduled: Binding failed for port 3298a0ff-80dc-479b-b87e-5cc3c0b3d16e, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 691.406980] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 691.407296] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquiring lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.407445] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Acquired lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.407604] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.648850] env[62503]: INFO nova.compute.manager [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] [instance: e49252e3-11cc-49c3-b959-24ad87ad48c9] Took 1.02 seconds to deallocate network for instance. [ 691.791571] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 691.930789] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.026720] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.216452] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bafbb5-0241-4b9e-823e-b206689de94d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.224551] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9fc5eb-ec27-4bd9-b0d3-4240ec65433c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.255432] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2958489-00dd-4769-8186-c8b160627335 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.263338] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04221e74-a9d1-42c0-874c-8cdb6f2dae46 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.276489] env[62503]: DEBUG nova.compute.provider_tree [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.310473] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.530235] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Releasing lock "refresh_cache-5ca1a33c-7324-481c-95cd-3761ce8ccf13" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.530641] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 692.530641] env[62503]: DEBUG nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 692.530641] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.552900] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.693221] env[62503]: INFO nova.scheduler.client.report [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Deleted allocations for instance e49252e3-11cc-49c3-b959-24ad87ad48c9 [ 692.779820] env[62503]: DEBUG nova.scheduler.client.report [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 693.060325] env[62503]: DEBUG nova.network.neutron [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.201712] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b638fdbf-443c-4fc0-ac4f-b9d527537434 tempest-ImagesNegativeTestJSON-829730830 tempest-ImagesNegativeTestJSON-829730830-project-member] Lock "e49252e3-11cc-49c3-b959-24ad87ad48c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.219s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.284768] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.285447] 
env[62503]: ERROR nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Traceback (most recent call last): [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.driver.spawn(context, instance, image_meta, [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] vm_ref = self.build_virtual_machine(instance, [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.285447] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] for vif in network_info: [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return self._sync_wrapper(fn, *args, **kwargs) [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.wait() [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self[:] = self._gt.wait() [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return self._exit_event.wait() [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] current.throw(*self._exc) [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.285977] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] result = function(*args, **kwargs) [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] return func(*args, **kwargs) [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise e [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] nwinfo = self.network_api.allocate_for_instance( [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] created_port_ids = self._update_ports_for_instance( [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] with excutils.save_and_reraise_exception(): [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] self.force_reraise() [ 693.286669] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise self.value [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] updated_port = self._update_port( [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] _ensure_no_port_binding_failure(port) [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 
2aa7880f-de24-4f32-b027-731a2030f987] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] raise exception.PortBindingFailed(port_id=port['id']) [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] nova.exception.PortBindingFailed: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. [ 693.287115] env[62503]: ERROR nova.compute.manager [instance: 2aa7880f-de24-4f32-b027-731a2030f987] [ 693.287115] env[62503]: DEBUG nova.compute.utils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 693.288480] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.568s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.289573] env[62503]: INFO nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.292210] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Build of instance 2aa7880f-de24-4f32-b027-731a2030f987 was re-scheduled: Binding failed for port 18f34f69-8a6c-4c87-af89-d119386b3019, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 693.292672] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 693.292925] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.293115] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquired lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.293443] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.563601] env[62503]: INFO nova.compute.manager [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] [instance: 5ca1a33c-7324-481c-95cd-3761ce8ccf13] Took 1.03 seconds to deallocate network for instance. [ 693.704400] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 693.825372] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.914509] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.226245] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.416344] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Releasing lock "refresh_cache-2aa7880f-de24-4f32-b027-731a2030f987" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.416891] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 694.417212] env[62503]: DEBUG nova.compute.manager [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 694.417656] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.437122] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.605340] env[62503]: INFO nova.scheduler.client.report [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Deleted allocations for instance 5ca1a33c-7324-481c-95cd-3761ce8ccf13 [ 694.697106] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e96a13-ad2d-415f-a648-ae1ba125b4f9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.703596] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2483acff-f20a-44dc-bade-e3bed787c77b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.736451] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c76e374-bc8f-471b-8c3d-c1722ee49191 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.744135] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36311fe8-d795-4c84-b731-0198d106da1c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.757349] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.942585] env[62503]: DEBUG nova.network.neutron [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.120111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f5db60c5-fc27-48c4-aa82-4f7bd2779675 tempest-ServersTestBootFromVolume-938742055 tempest-ServersTestBootFromVolume-938742055-project-member] Lock "5ca1a33c-7324-481c-95cd-3761ce8ccf13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.221s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.260240] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 695.448325] env[62503]: INFO nova.compute.manager [None 
req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 2aa7880f-de24-4f32-b027-731a2030f987] Took 1.03 seconds to deallocate network for instance. [ 695.622848] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 695.765768] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.766435] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 695.769203] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.225s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.149217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.273636] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.278957] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 696.279851] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.357632] env[62503]: DEBUG nova.policy [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4808e54b983448c399112ade5bd3e543', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15e74d975960428280fcb64d992b9c63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 696.443773] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "a4b600d2-b411-4957-92cb-7e8e462fde1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.443773] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "a4b600d2-b411-4957-92cb-7e8e462fde1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.489563] env[62503]: INFO nova.scheduler.client.report [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Deleted allocations for instance 2aa7880f-de24-4f32-b027-731a2030f987 [ 696.679499] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf6db60-928e-427e-8f0e-0afc473c2d46 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.687549] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10264be-2733-4f60-acff-de9b29c79a6b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.718801] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497cc621-463c-4677-a060-8465a3878cee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.728017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f41763a-2c49-41db-b171-036c51577668 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.741071] env[62503]: DEBUG 
nova.compute.provider_tree [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.743124] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Successfully created port: bf8b4134-287d-4636-923d-ae78e2eb3f0e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.782065] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 697.003176] env[62503]: DEBUG oslo_concurrency.lockutils [None req-810913a5-54c9-4615-8817-f2307b8447be tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "2aa7880f-de24-4f32-b027-731a2030f987" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.415s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.246609] env[62503]: DEBUG nova.scheduler.client.report [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 697.506126] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 697.751386] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.982s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.751833] env[62503]: ERROR nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Traceback (most recent call last): [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.driver.spawn(context, instance, image_meta, [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] vm_ref = self.build_virtual_machine(instance, [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.751833] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] for vif in network_info: [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return self._sync_wrapper(fn, *args, **kwargs) [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.wait() [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 697.752164] env[62503]: ERROR nova.compute.manager 
[instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self[:] = self._gt.wait() [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return self._exit_event.wait() [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] current.throw(*self._exc) [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.752164] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] result = function(*args, **kwargs) [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] return func(*args, **kwargs) [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise e [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] nwinfo = self.network_api.allocate_for_instance( [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] created_port_ids = self._update_ports_for_instance( [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] with excutils.save_and_reraise_exception(): [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] self.force_reraise() [ 697.752494] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise self.value [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.753268] 
env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] updated_port = self._update_port( [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] _ensure_no_port_binding_failure(port) [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] raise exception.PortBindingFailed(port_id=port['id']) [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] nova.exception.PortBindingFailed: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. [ 697.753268] env[62503]: ERROR nova.compute.manager [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] [ 697.753268] env[62503]: DEBUG nova.compute.utils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.753798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.459s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.757425] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Build of instance 0af1e65d-ca88-475e-a871-4087bd49cd9d was re-scheduled: Binding failed for port 16aeed73-bb69-49c1-a84f-c3bcca7abfad, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 697.757975] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 697.758224] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquiring lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.758372] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Acquired lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.758531] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.788471] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 697.817481] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.817923] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.817923] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.818073] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.818230] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.818386] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.818582] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.818950] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 697.818950] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.819104] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.819415] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.820422] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea8ff88-9c63-4eb8-a7a7-a27880b711c8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.828858] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a2f3ca-6f57-4734-a010-9b37b7c0a946 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.045664] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.258508] env[62503]: DEBUG nova.compute.manager [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Received event network-changed-bf8b4134-287d-4636-923d-ae78e2eb3f0e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 698.258508] env[62503]: DEBUG nova.compute.manager [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Refreshing instance network info cache due to event network-changed-bf8b4134-287d-4636-923d-ae78e2eb3f0e. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 698.258508] env[62503]: DEBUG oslo_concurrency.lockutils [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] Acquiring lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.258508] env[62503]: DEBUG oslo_concurrency.lockutils [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] Acquired lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.258712] env[62503]: DEBUG nova.network.neutron [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Refreshing network info cache for port bf8b4134-287d-4636-923d-ae78e2eb3f0e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.464647] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. [ 698.464647] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 698.464647] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.464647] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.464647] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.464647] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.464647] env[62503]: ERROR nova.compute.manager raise self.value [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.464647] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 698.464647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.464647] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 698.465131] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.465131] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 698.465131] env[62503]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. [ 698.465131] env[62503]: ERROR nova.compute.manager [ 698.465131] env[62503]: Traceback (most recent call last): [ 698.465131] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 698.465131] env[62503]: listener.cb(fileno) [ 698.465131] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.465131] env[62503]: result = function(*args, **kwargs) [ 698.465131] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.465131] env[62503]: return func(*args, **kwargs) [ 698.465131] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 698.465131] env[62503]: raise e [ 698.465131] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 698.465131] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 698.465131] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.465131] env[62503]: created_port_ids = self._update_ports_for_instance( [ 698.465131] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.465131] env[62503]: with excutils.save_and_reraise_exception(): [ 698.465131] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.465131] env[62503]: self.force_reraise() [ 698.465131] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.465131] env[62503]: raise self.value [ 698.465131] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.465131] env[62503]: updated_port = self._update_port( [ 698.465131] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.465131] env[62503]: _ensure_no_port_binding_failure(port) [ 698.465131] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.465131] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 698.465913] env[62503]: nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. [ 698.465913] env[62503]: Removing descriptor: 16 [ 698.465913] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. 
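The tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294). Below is a minimal sketch of that style of check, assuming the port dict returned by Neutron carries binding:vif_type set to the binding_failed sentinel when no mechanism driver could bind the port; the exception class is a local stand-in for nova.exception.PortBindingFailed, not the Nova source itself.

# Illustrative stand-in for nova.exception.PortBindingFailed.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)

# Sentinel Neutron leaves in binding:vif_type when the port could not be bound.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

def ensure_no_port_binding_failure(port):
    # If the port came back from Neutron with a failed binding, abort the
    # network allocation; this is the check the tracebacks above hit.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': 'bf8b4134-287d-4636-923d-ae78e2eb3f0e',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)

In other words, the spawn aborts because Neutron reported the port as unbindable, which is why the message points at the neutron logs for the root cause.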
[ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Traceback (most recent call last): [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] yield resources [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.driver.spawn(context, instance, image_meta, [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 698.465913] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] vm_ref = self.build_virtual_machine(instance, [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] for vif in network_info: [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self._sync_wrapper(fn, *args, **kwargs) [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.wait() [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self[:] = self._gt.wait() [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self._exit_event.wait() [ 698.466220] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 698.466538] env[62503]: ERROR 
nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] result = hub.switch() [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self.greenlet.switch() [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] result = function(*args, **kwargs) [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return func(*args, **kwargs) [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise e [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] nwinfo = self.network_api.allocate_for_instance( [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.466538] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] created_port_ids = self._update_ports_for_instance( [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] with excutils.save_and_reraise_exception(): [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.force_reraise() [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise self.value [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] updated_port = self._update_port( [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.466870] 
env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] _ensure_no_port_binding_failure(port) [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.466870] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise exception.PortBindingFailed(port_id=port['id']) [ 698.467182] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. [ 698.467182] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] [ 698.467182] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Terminating instance [ 698.469319] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.470120] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.678549] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafda100-bb13-4436-9fc8-66a769396697 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.682018] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.689567] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d118327-128f-4576-b487-c93b97ef6870 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.723952] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9025b2a-187a-457a-891b-6e6188a80a95 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.732282] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10012c3f-4774-4a23-9121-d4e563d660cb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.749013] env[62503]: DEBUG nova.compute.provider_tree [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Inventory has not changed 
in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.805266] env[62503]: DEBUG nova.network.neutron [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.012331] env[62503]: DEBUG nova.network.neutron [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.186095] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Releasing lock "refresh_cache-0af1e65d-ca88-475e-a871-4087bd49cd9d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.186438] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 699.186520] env[62503]: DEBUG nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 699.186684] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.220956] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.251880] env[62503]: DEBUG nova.scheduler.client.report [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 699.516186] env[62503]: DEBUG oslo_concurrency.lockutils [req-71e3c1c7-e372-47f9-bbf9-a742c97c1cc3 req-d3cf83c2-8473-46b6-b68d-363fca6a1fd5 service nova] Releasing lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.516612] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.516798] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.724976] env[62503]: DEBUG nova.network.neutron [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.756138] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.002s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.756796] env[62503]: ERROR nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. 
[ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Traceback (most recent call last): [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.driver.spawn(context, instance, image_meta, [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] vm_ref = self.build_virtual_machine(instance, [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.756796] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] for vif in network_info: [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return self._sync_wrapper(fn, *args, **kwargs) [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.wait() [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self[:] = self._gt.wait() [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return self._exit_event.wait() [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] current.throw(*self._exc) [ 699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
699.759676] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] result = function(*args, **kwargs) [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] return func(*args, **kwargs) [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise e [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] nwinfo = self.network_api.allocate_for_instance( [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] created_port_ids = self._update_ports_for_instance( [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] with excutils.save_and_reraise_exception(): [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] self.force_reraise() [ 699.760172] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise self.value [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] updated_port = self._update_port( [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] _ensure_no_port_binding_failure(port) [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] raise exception.PortBindingFailed(port_id=port['id']) [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] nova.exception.PortBindingFailed: Binding failed for 
port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. [ 699.760501] env[62503]: ERROR nova.compute.manager [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] [ 699.760501] env[62503]: DEBUG nova.compute.utils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 699.760774] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.057s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.760774] env[62503]: INFO nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.763680] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Build of instance d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e was re-scheduled: Binding failed for port 65cd4476-dc4f-47cb-b6bf-f086c9b46323, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 699.763841] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 699.764066] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquiring lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.764241] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Acquired lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.764353] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.060388] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.169770] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.227508] env[62503]: INFO nova.compute.manager [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] [instance: 0af1e65d-ca88-475e-a871-4087bd49cd9d] Took 1.04 seconds to deallocate network for instance. [ 700.290467] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.349635] env[62503]: DEBUG nova.compute.manager [req-77b1f693-f45a-4331-8235-26c66584f5cd req-ce437a3a-c3b2-4aac-b6b4-f75e4e9ab858 service nova] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Received event network-vif-deleted-bf8b4134-287d-4636-923d-ae78e2eb3f0e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 700.384885] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.673217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.673682] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 700.673994] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.674238] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fe8b982-029d-4f81-b53e-41f74499e237 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.685556] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4064a128-60fd-4b16-9a6b-5ed99c0753d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.714794] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f244a7c9-2f39-4f91-aeba-e5f36e7f79ef could not be found. 
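The InstanceNotFound WARNING that closes the entry above is expected here: the spawn failed before any VM was created in vCenter, so there is nothing to remove on the hypervisor and the driver proceeds as if the instance were already destroyed (the "Instance destroyed" and "Took 0.04 seconds" entries that follow). A minimal sketch of that tolerate-missing behaviour, using a hypothetical in-memory backend rather than the real vmwareapi session:

# Hypothetical stand-in for the hypervisor inventory; the failed spawn above
# never created a VM, so the instance UUID is simply not present.
backend_vms = {}

def destroy(instance_uuid):
    vm_ref = backend_vms.pop(instance_uuid, None)
    if vm_ref is None:
        # Mirrors the WARNING in the log: missing on the backend is treated
        # as already destroyed, so network deallocation and claim cleanup
        # can still run.
        print("Instance does not exist on backend: %s" % instance_uuid)
        return
    print("Destroyed %s" % vm_ref)

destroy('f244a7c9-2f39-4f91-aeba-e5f36e7f79ef')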
[ 700.715060] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.715262] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Took 0.04 seconds to destroy the instance on the hypervisor. [ 700.715525] env[62503]: DEBUG oslo.service.loopingcall [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.715772] env[62503]: DEBUG nova.compute.manager [-] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 700.715860] env[62503]: DEBUG nova.network.neutron [-] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.750111] env[62503]: DEBUG nova.network.neutron [-] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.887639] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Releasing lock "refresh_cache-d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.888420] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 700.888712] env[62503]: DEBUG nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 700.888938] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.912932] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.007641] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "9eee91c6-a949-453b-8ccd-ba986251ed27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.007887] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "9eee91c6-a949-453b-8ccd-ba986251ed27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.167400] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1583cf5a-a51c-4e38-91e0-6939d3076211 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.177074] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08140e1-3584-4f88-bd3e-01a1ada9880a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.211594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9194ef1-a40e-4851-8917-95e5d4c775d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.219195] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d427068d-447a-485c-b2c3-e374c1b5e0dd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.232445] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.252132] env[62503]: DEBUG nova.network.neutron [-] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.265845] env[62503]: INFO nova.scheduler.client.report [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Deleted allocations for instance 0af1e65d-ca88-475e-a871-4087bd49cd9d [ 701.417450] env[62503]: DEBUG nova.network.neutron [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.735850] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c 
tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 701.755092] env[62503]: INFO nova.compute.manager [-] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Took 1.04 seconds to deallocate network for instance. [ 701.758674] env[62503]: DEBUG nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 701.758765] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.776896] env[62503]: DEBUG oslo_concurrency.lockutils [None req-04b5e2fe-420a-4980-828f-5aaaaac6b6d6 tempest-MigrationsAdminTest-1483146718 tempest-MigrationsAdminTest-1483146718-project-member] Lock "0af1e65d-ca88-475e-a871-4087bd49cd9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.594s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.920322] env[62503]: INFO nova.compute.manager [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] [instance: d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e] Took 1.03 seconds to deallocate network for instance. 
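The inventory payload logged above (here and in the earlier report for the same provider) is what the scheduler report client sends to placement. A small sketch of the capacity those numbers imply, assuming the usual placement formula of (total - reserved) * allocation_ratio per resource class, with max_unit bounding any single allocation:

# Capacity implied by the provider inventory reported above, assuming the
# standard formula (total - reserved) * allocation_ratio, with
# min_unit/max_unit/step_size constraining each individual allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 176,   'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}, per-allocation max {inv['max_unit']}")

For this provider that works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk.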
[ 702.046791] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquiring lock "f6f17748-815c-417f-bce6-3bc97f23b637" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.047434] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "f6f17748-815c-417f-bce6-3bc97f23b637" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.242277] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.243448] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 702.248058] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.668s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.253449] env[62503]: INFO nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.280901] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 702.757867] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.759305] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 702.759513] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.808543] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.835825] env[62503]: DEBUG nova.policy [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4808e54b983448c399112ade5bd3e543', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15e74d975960428280fcb64d992b9c63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.958646] env[62503]: INFO nova.scheduler.client.report [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Deleted allocations for instance d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e [ 703.263748] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 703.313688] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Successfully created port: ef09a46d-90a7-42a1-b67d-54eeef8f5b29 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.472834] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ffbee50c-b7fd-4096-9606-99a0d4599842 tempest-ServersTestJSON-1104182753 tempest-ServersTestJSON-1104182753-project-member] Lock "d1aeccd3-ba9e-46cc-b8a2-b64da9b94d6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.325s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.683409] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3354a0b2-708d-4c40-ac8a-4a134820dd9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.691538] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0af44d-469a-44d6-af32-c50edd907020 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.727601] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a36b53d-ed05-420f-94a5-ccb1085eccee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.737852] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb0512a-a605-4755-b954-64d22af1f7bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.754815] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.983789] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 704.259231] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 704.273185] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 704.309890] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.309890] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.309890] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.310110] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.310275] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 704.310558] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.311129] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.311397] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.311679] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.312452] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.312452] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.313545] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc27be1-f917-4c8f-bb5a-84c6b9f37266 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.321900] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2382560a-8301-4651-85a6-db57f74f07e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.504713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.658353] env[62503]: DEBUG nova.compute.manager [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Received event network-changed-ef09a46d-90a7-42a1-b67d-54eeef8f5b29 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} 
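[editor's note] The `nova.virt.hardware` records above walk through topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, so the limits default to 65536:65536:65536 and the only possible topology is 1 socket x 1 core x 1 thread. The sketch below is a hedged, self-contained illustration of that enumeration step only; it is not nova's implementation, and the `Topology`/`possible_topologies` names and the argument ordering are assumptions made for the example.

```python
# Simplified illustration of the "Build topologies for 1 vcpu(s)" step logged
# above: enumerate every (sockets, cores, threads) split whose product equals
# the vCPU count, subject to the per-dimension maxima.  Assumed names; not
# nova.virt.hardware itself.
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every topology whose sockets * cores * threads == vcpus."""
    found = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append(Topology(sockets, cores, threads))
    return found


# For vcpus=1 with the default 65536 limits, exactly one topology exists,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
```

With a larger flavor (say vcpus=4) the same enumeration would return several candidates, which is why the log then reports sorting the desired topologies before picking one.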
[ 704.658557] env[62503]: DEBUG nova.compute.manager [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Refreshing instance network info cache due to event network-changed-ef09a46d-90a7-42a1-b67d-54eeef8f5b29. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 704.658829] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] Acquiring lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.659012] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] Acquired lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.659272] env[62503]: DEBUG nova.network.neutron [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Refreshing network info cache for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.719773] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquiring lock "1251e59f-9c01-4115-8400-40aacedd97e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.720067] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "1251e59f-9c01-4115-8400-40aacedd97e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.763834] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.764216] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 704.767231] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.239s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.769118] env[62503]: INFO nova.compute.claims [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.168213] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. [ 705.168213] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 705.168213] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.168213] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.168213] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.168213] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.168213] env[62503]: ERROR nova.compute.manager raise self.value [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.168213] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 705.168213] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.168213] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 705.168673] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.168673] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 705.168673] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. 
[ 705.168673] env[62503]: ERROR nova.compute.manager [ 705.168673] env[62503]: Traceback (most recent call last): [ 705.168673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 705.168673] env[62503]: listener.cb(fileno) [ 705.168673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.168673] env[62503]: result = function(*args, **kwargs) [ 705.168673] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.168673] env[62503]: return func(*args, **kwargs) [ 705.168673] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 705.168673] env[62503]: raise e [ 705.168673] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 705.168673] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 705.168673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.168673] env[62503]: created_port_ids = self._update_ports_for_instance( [ 705.168673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.168673] env[62503]: with excutils.save_and_reraise_exception(): [ 705.168673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.168673] env[62503]: self.force_reraise() [ 705.168673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.168673] env[62503]: raise self.value [ 705.168673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.168673] env[62503]: updated_port = self._update_port( [ 705.168673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.168673] env[62503]: _ensure_no_port_binding_failure(port) [ 705.168673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.168673] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 705.169800] env[62503]: nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. [ 705.169800] env[62503]: Removing descriptor: 14 [ 705.169800] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. 
[ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Traceback (most recent call last): [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] yield resources [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.driver.spawn(context, instance, image_meta, [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.169800] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] vm_ref = self.build_virtual_machine(instance, [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] for vif in network_info: [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self._sync_wrapper(fn, *args, **kwargs) [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.wait() [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self[:] = self._gt.wait() [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self._exit_event.wait() [ 705.170524] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 705.170845] env[62503]: ERROR 
nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] result = hub.switch() [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self.greenlet.switch() [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] result = function(*args, **kwargs) [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return func(*args, **kwargs) [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise e [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] nwinfo = self.network_api.allocate_for_instance( [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.170845] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] created_port_ids = self._update_ports_for_instance( [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] with excutils.save_and_reraise_exception(): [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.force_reraise() [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise self.value [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] updated_port = self._update_port( [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.171273] 
env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] _ensure_no_port_binding_failure(port) [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.171273] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise exception.PortBindingFailed(port_id=port['id']) [ 705.171568] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. [ 705.171568] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] [ 705.171568] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Terminating instance [ 705.172722] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.180571] env[62503]: DEBUG nova.network.neutron [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.269348] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.270774] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 705.270937] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.323937] env[62503]: DEBUG nova.network.neutron [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.346433] env[62503]: DEBUG nova.policy [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4808e54b983448c399112ade5bd3e543', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15e74d975960428280fcb64d992b9c63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.741264] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Successfully created port: 9d3ab059-5e2e-4779-af52-242e031464dc {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.777213] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 705.827994] env[62503]: DEBUG oslo_concurrency.lockutils [req-ff74aa9e-4814-40a0-9de3-a746c378bfce req-a8f3e242-578a-4b0b-bb51-3dbcfb02e435 service nova] Releasing lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.827994] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.827994] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.187924] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bba76d-f559-454c-bb30-1e8e12ed6a8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.196502] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdb4452-f8a9-41a9-bf04-3b0fff382eeb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.229329] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62bd78a-971a-404a-a840-ff5957e23195 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.238709] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7452bb-5354-4dde-8137-a2763cca6c7b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.252830] env[62503]: DEBUG nova.compute.provider_tree [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.347853] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.526535] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.691227] env[62503]: DEBUG nova.compute.manager [req-da1be284-d09e-40e7-9aa8-daba4af77e3e req-34f5ae11-1588-4761-a9c7-b028ad3eccf6 service nova] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Received event network-vif-deleted-ef09a46d-90a7-42a1-b67d-54eeef8f5b29 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 706.758761] env[62503]: DEBUG nova.scheduler.client.report [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 706.790191] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 706.821251] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.821471] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.821471] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.821997] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.821997] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.821997] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.822808] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.823044] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 706.824031] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.824279] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.824474] env[62503]: DEBUG nova.virt.hardware [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.825393] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abd9d0a-6c06-4092-93a9-76372d88b024 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.838857] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e18e622-62dc-4063-ba59-c3289ec4d049 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.939451] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. 
[ 706.939451] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 706.939451] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.939451] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.939451] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.939451] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.939451] env[62503]: ERROR nova.compute.manager raise self.value [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.939451] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 706.939451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.939451] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 706.939947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.939947] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 706.939947] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. 
[ 706.939947] env[62503]: ERROR nova.compute.manager [ 706.939947] env[62503]: Traceback (most recent call last): [ 706.939947] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 706.939947] env[62503]: listener.cb(fileno) [ 706.939947] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.939947] env[62503]: result = function(*args, **kwargs) [ 706.939947] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.939947] env[62503]: return func(*args, **kwargs) [ 706.939947] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 706.939947] env[62503]: raise e [ 706.939947] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 706.939947] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 706.939947] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.939947] env[62503]: created_port_ids = self._update_ports_for_instance( [ 706.939947] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.939947] env[62503]: with excutils.save_and_reraise_exception(): [ 706.939947] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.939947] env[62503]: self.force_reraise() [ 706.939947] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.939947] env[62503]: raise self.value [ 706.939947] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.939947] env[62503]: updated_port = self._update_port( [ 706.939947] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.939947] env[62503]: _ensure_no_port_binding_failure(port) [ 706.939947] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.939947] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 706.940688] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. [ 706.940688] env[62503]: Removing descriptor: 14 [ 706.940688] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. 
[ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Traceback (most recent call last): [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] yield resources [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.driver.spawn(context, instance, image_meta, [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.940688] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] vm_ref = self.build_virtual_machine(instance, [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] for vif in network_info: [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self._sync_wrapper(fn, *args, **kwargs) [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.wait() [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self[:] = self._gt.wait() [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self._exit_event.wait() [ 706.940991] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 706.941380] env[62503]: ERROR 
nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] result = hub.switch() [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self.greenlet.switch() [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] result = function(*args, **kwargs) [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return func(*args, **kwargs) [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise e [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] nwinfo = self.network_api.allocate_for_instance( [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.941380] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] created_port_ids = self._update_ports_for_instance( [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] with excutils.save_and_reraise_exception(): [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.force_reraise() [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise self.value [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] updated_port = self._update_port( [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.941962] 
env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] _ensure_no_port_binding_failure(port) [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.941962] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise exception.PortBindingFailed(port_id=port['id']) [ 706.942438] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. [ 706.942438] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] [ 706.942438] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Terminating instance [ 706.942948] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.943130] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.943297] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.030137] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.030372] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 707.030566] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.030888] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4a4301e-4179-4c18-9941-fbac856db435 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.043348] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee65132-3144-4bdc-aeab-4343db7b6677 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.071299] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9 could not be found. [ 707.071526] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 707.071706] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 707.071950] env[62503]: DEBUG oslo.service.loopingcall [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.072205] env[62503]: DEBUG nova.compute.manager [-] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 707.072328] env[62503]: DEBUG nova.network.neutron [-] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.089618] env[62503]: DEBUG nova.network.neutron [-] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.265065] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.265065] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 707.267568] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.957s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.268895] env[62503]: INFO nova.compute.claims [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.466396] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.568834] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.591182] env[62503]: DEBUG nova.network.neutron [-] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.774399] env[62503]: DEBUG nova.compute.utils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.774521] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 707.774620] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.849297] env[62503]: DEBUG nova.policy [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '344d593190f145b0b67ad85ff80f5907', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '072949a5b3ef46bc86dfbf610e54f0a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 708.075961] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.075961] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 708.075961] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.080017] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91b7fc74-f4d6-4773-8041-4b52e5de2835 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.094224] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3aa230-a661-4ee5-b071-8de01c19e2f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.110478] env[62503]: INFO nova.compute.manager [-] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Took 1.04 seconds to deallocate network for instance. 
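[editor's note] Both build failures above end in the same place: Neutron accepts the port create, but the port comes back with a failed binding, `_ensure_no_port_binding_failure` raises `PortBindingFailed`, and the compute manager terminates the instance, deallocates the (empty) network info, and aborts the resource claim. The snippet below is a hedged, self-contained sketch of that check only, assuming the conventional Neutron `binding:vif_type` attribute with the `binding_failed` sentinel; it is not the code in `nova/network/neutron.py`, and only the exception message mirrors the log.

```python
# Hedged sketch of the failure mode seen in the tracebacks above: a Neutron
# port dict whose binding could not be completed on the host is rejected by
# raising PortBindingFailed.  Standalone illustration; assumed names.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


VIF_TYPE_BINDING_FAILED = 'binding_failed'


def ensure_no_port_binding_failure(port):
    # 'binding:vif_type' on the Neutron port reports whether the ML2
    # mechanism drivers managed to bind the port on the target host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The ports created above (ef09a46d-... and 9d3ab059-...) came back unbound,
# so the check raises and the build is aborted:
try:
    ensure_no_port_binding_failure(
        {'id': '9d3ab059-5e2e-4779-af52-242e031464dc',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```

In the log this exception propagates out of `_allocate_network_async`, surfaces as "Instance failed to spawn", and triggers the "Terminating instance" / "Aborting claim" cleanup records that follow each failure.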
[ 708.113872] env[62503]: DEBUG nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 708.114286] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.126352] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36 could not be found. [ 708.126584] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.126763] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Took 0.05 seconds to destroy the instance on the hypervisor. [ 708.127014] env[62503]: DEBUG oslo.service.loopingcall [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.127246] env[62503]: DEBUG nova.compute.manager [-] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 708.127334] env[62503]: DEBUG nova.network.neutron [-] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.150902] env[62503]: DEBUG nova.network.neutron [-] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.284108] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 708.303024] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Successfully created port: 4a362122-9a64-4fbc-91b9-dff850e54b53 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.468103] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "86422990-4215-4628-a7a7-4fdc910e304e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.468464] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.654992] env[62503]: DEBUG nova.network.neutron [-] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.695326] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a3d7eb-711c-4f68-83c4-6d5a99320627 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.702666] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018bcb64-efbd-4b68-bc35-3dbdf20960fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.738478] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c842f454-9d65-4697-87d1-723c8b101ea8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.746736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a467487-3090-4e83-949d-d415fb8b0fd2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.761863] env[62503]: DEBUG nova.compute.provider_tree [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.994246] env[62503]: DEBUG nova.compute.manager [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Received event network-changed-9d3ab059-5e2e-4779-af52-242e031464dc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 708.994246] env[62503]: DEBUG nova.compute.manager 
[req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Refreshing instance network info cache due to event network-changed-9d3ab059-5e2e-4779-af52-242e031464dc. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 708.994246] env[62503]: DEBUG oslo_concurrency.lockutils [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] Acquiring lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.994246] env[62503]: DEBUG oslo_concurrency.lockutils [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] Acquired lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.994246] env[62503]: DEBUG nova.network.neutron [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Refreshing network info cache for port 9d3ab059-5e2e-4779-af52-242e031464dc {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.160541] env[62503]: INFO nova.compute.manager [-] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Took 1.03 seconds to deallocate network for instance. [ 709.161609] env[62503]: DEBUG nova.compute.claims [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 709.161609] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.267153] env[62503]: DEBUG nova.scheduler.client.report [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 709.295861] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 709.328018] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.328018] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.328018] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.328018] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.328282] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.328282] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.328282] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.328599] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.328915] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 
tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.329231] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.332016] env[62503]: DEBUG nova.virt.hardware [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.332016] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1b1db7-b4f7-493b-accf-0ddae1a388fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.338919] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6be9e73-ce1f-4373-b6da-2daa0c12b9c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.509952] env[62503]: DEBUG nova.network.neutron [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.630316] env[62503]: DEBUG nova.network.neutron [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.770252] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.770786] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 709.777528] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.551s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.779124] env[62503]: INFO nova.compute.claims [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.918368] env[62503]: ERROR nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. [ 709.918368] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 709.918368] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.918368] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.918368] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.918368] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.918368] env[62503]: ERROR nova.compute.manager raise self.value [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.918368] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 709.918368] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.918368] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 709.918972] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.918972] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 709.918972] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. 
[ 709.918972] env[62503]: ERROR nova.compute.manager [ 709.918972] env[62503]: Traceback (most recent call last): [ 709.918972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 709.918972] env[62503]: listener.cb(fileno) [ 709.918972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.918972] env[62503]: result = function(*args, **kwargs) [ 709.918972] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.918972] env[62503]: return func(*args, **kwargs) [ 709.918972] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 709.918972] env[62503]: raise e [ 709.918972] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 709.918972] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 709.918972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.918972] env[62503]: created_port_ids = self._update_ports_for_instance( [ 709.918972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.918972] env[62503]: with excutils.save_and_reraise_exception(): [ 709.918972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.918972] env[62503]: self.force_reraise() [ 709.918972] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.918972] env[62503]: raise self.value [ 709.918972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.918972] env[62503]: updated_port = self._update_port( [ 709.918972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.918972] env[62503]: _ensure_no_port_binding_failure(port) [ 709.918972] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.918972] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 709.919785] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. [ 709.919785] env[62503]: Removing descriptor: 14 [ 709.919785] env[62503]: ERROR nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. 
[ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Traceback (most recent call last): [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] yield resources [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.driver.spawn(context, instance, image_meta, [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.919785] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] vm_ref = self.build_virtual_machine(instance, [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] for vif in network_info: [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self._sync_wrapper(fn, *args, **kwargs) [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.wait() [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self[:] = self._gt.wait() [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self._exit_event.wait() [ 709.920092] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 709.920486] env[62503]: ERROR 
nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] result = hub.switch() [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self.greenlet.switch() [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] result = function(*args, **kwargs) [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return func(*args, **kwargs) [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise e [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] nwinfo = self.network_api.allocate_for_instance( [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.920486] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] created_port_ids = self._update_ports_for_instance( [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] with excutils.save_and_reraise_exception(): [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.force_reraise() [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise self.value [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] updated_port = self._update_port( [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.920796] 
env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] _ensure_no_port_binding_failure(port) [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.920796] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise exception.PortBindingFailed(port_id=port['id']) [ 709.921089] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. [ 709.921089] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] [ 709.921089] env[62503]: INFO nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Terminating instance [ 709.922619] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquiring lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.922783] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquired lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.922952] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.133519] env[62503]: DEBUG oslo_concurrency.lockutils [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] Releasing lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.133519] env[62503]: DEBUG nova.compute.manager [req-57c7ae96-37c3-4857-88c2-17faf2cb6873 req-96a5d6d7-3200-4142-ae5f-125051354685 service nova] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Received event network-vif-deleted-9d3ab059-5e2e-4779-af52-242e031464dc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 710.288583] env[62503]: DEBUG nova.compute.utils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.290480] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 710.294580] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 710.342147] env[62503]: DEBUG nova.policy [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7952ea6986ba467ba63fecf6a39d5a80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9f5ebf219ac468a930d25221239882f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 710.611016] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.636899] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Successfully created port: 81178767-32f1-432a-b245-c632c7753243 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.771744] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.799219] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 711.052796] env[62503]: DEBUG nova.compute.manager [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Received event network-changed-4a362122-9a64-4fbc-91b9-dff850e54b53 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 711.052944] env[62503]: DEBUG nova.compute.manager [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Refreshing instance network info cache due to event network-changed-4a362122-9a64-4fbc-91b9-dff850e54b53. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 711.053186] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] Acquiring lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.191532] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c47650-fb3b-4784-9677-cef3c0a2773e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.199970] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42781780-5260-4b9e-a76a-326e2afe272a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.236489] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d386a43-7ee0-4bdf-a6bf-279d5d47570c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.245031] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ecef8c-930e-43c0-a9fc-bb85639832b6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.259773] env[62503]: DEBUG nova.compute.provider_tree [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.275342] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Releasing lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.275687] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 711.275875] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 711.276523] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] Acquired lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.276741] env[62503]: DEBUG nova.network.neutron [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Refreshing network info cache for port 4a362122-9a64-4fbc-91b9-dff850e54b53 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 711.277689] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fc02505-56b7-4b1f-8d4c-e4f5a1edac4d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.289237] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a82d27c-3fc7-4383-ac51-3e915b7d68a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.318823] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4930731-7333-426c-a2fc-a732d351a0f0 could not be found. [ 711.318823] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.318823] env[62503]: INFO nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 711.318823] env[62503]: DEBUG oslo.service.loopingcall [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.318823] env[62503]: DEBUG nova.compute.manager [-] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 711.318823] env[62503]: DEBUG nova.network.neutron [-] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.341343] env[62503]: DEBUG nova.network.neutron [-] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.681647] env[62503]: ERROR nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. [ 711.681647] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 711.681647] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.681647] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.681647] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.681647] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.681647] env[62503]: ERROR nova.compute.manager raise self.value [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.681647] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.681647] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.681647] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.682280] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.682280] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.682280] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. 
[ 711.682280] env[62503]: ERROR nova.compute.manager [ 711.682280] env[62503]: Traceback (most recent call last): [ 711.682280] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.682280] env[62503]: listener.cb(fileno) [ 711.682280] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.682280] env[62503]: result = function(*args, **kwargs) [ 711.682280] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.682280] env[62503]: return func(*args, **kwargs) [ 711.682280] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 711.682280] env[62503]: raise e [ 711.682280] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 711.682280] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 711.682280] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.682280] env[62503]: created_port_ids = self._update_ports_for_instance( [ 711.682280] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.682280] env[62503]: with excutils.save_and_reraise_exception(): [ 711.682280] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.682280] env[62503]: self.force_reraise() [ 711.682280] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.682280] env[62503]: raise self.value [ 711.682280] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.682280] env[62503]: updated_port = self._update_port( [ 711.682280] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.682280] env[62503]: _ensure_no_port_binding_failure(port) [ 711.682280] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.682280] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.683144] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. [ 711.683144] env[62503]: Removing descriptor: 14 [ 711.763101] env[62503]: DEBUG nova.scheduler.client.report [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 711.797187] env[62503]: DEBUG nova.network.neutron [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.819135] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 711.844202] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.844509] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.844646] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.844825] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.844969] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.846020] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 711.846020] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 711.846266] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.850019] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.850019] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.850019] env[62503]: DEBUG nova.virt.hardware [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.850019] env[62503]: DEBUG nova.network.neutron [-] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.850019] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c381cbcf-f625-42bc-9164-6ed3cf54d309 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.860391] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492aa1a2-14d3-41fb-beec-d4ace9800ec2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.874870] env[62503]: ERROR nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. 
[ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Traceback (most recent call last): [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] yield resources [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.driver.spawn(context, instance, image_meta, [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] vm_ref = self.build_virtual_machine(instance, [ 711.874870] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] for vif in network_info: [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return self._sync_wrapper(fn, *args, **kwargs) [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.wait() [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self[:] = self._gt.wait() [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return self._exit_event.wait() [ 711.875283] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 711.875283] env[62503]: ERROR 
nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] current.throw(*self._exc) [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] result = function(*args, **kwargs) [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return func(*args, **kwargs) [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise e [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] nwinfo = self.network_api.allocate_for_instance( [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] created_port_ids = self._update_ports_for_instance( [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] with excutils.save_and_reraise_exception(): [ 711.875662] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.force_reraise() [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise self.value [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] updated_port = self._update_port( [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] _ensure_no_port_binding_failure(port) [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise exception.PortBindingFailed(port_id=port['id']) [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. [ 711.876194] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] [ 711.876194] env[62503]: INFO nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Terminating instance [ 711.877045] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquiring lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.877210] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquired lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.877373] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.896052] env[62503]: DEBUG nova.network.neutron [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.267929] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.268540] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 712.271606] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.122s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.272976] env[62503]: INFO nova.compute.claims [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.353885] env[62503]: INFO nova.compute.manager [-] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Took 1.04 seconds to deallocate network for instance. [ 712.356139] env[62503]: DEBUG nova.compute.claims [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.356343] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.393984] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.398234] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] Releasing lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.398477] env[62503]: DEBUG nova.compute.manager [req-d9e9ab88-fed8-4677-8c14-ef74fa8db539 req-32d26f57-2989-4525-8726-b3ee67f2eefd service nova] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Received event network-vif-deleted-4a362122-9a64-4fbc-91b9-dff850e54b53 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 712.486821] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.777434] env[62503]: DEBUG nova.compute.utils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 712.780659] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 712.780829] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 712.843956] env[62503]: DEBUG nova.policy [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '620f078777b54ef9a9aed0300d54c76d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9601deb400d47ed86c07a313b2ecdd7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 712.877709] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.877940] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 
tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.989765] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Releasing lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.990220] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 712.990447] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.990732] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-470d292c-6af6-4d81-835c-8c1ecea4070b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.999535] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d06404-fc68-4766-a7cd-9f7106cc2d1c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.023433] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9bda2d4c-38c0-49ba-9a69-402869ff6a65 could not be found. [ 713.023657] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.023835] env[62503]: INFO nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Took 0.03 seconds to destroy the instance on the hypervisor. [ 713.024081] env[62503]: DEBUG oslo.service.loopingcall [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.024855] env[62503]: DEBUG nova.compute.manager [-] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 713.024855] env[62503]: DEBUG nova.network.neutron [-] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.039855] env[62503]: DEBUG nova.network.neutron [-] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.081472] env[62503]: DEBUG nova.compute.manager [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Received event network-changed-81178767-32f1-432a-b245-c632c7753243 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 713.081655] env[62503]: DEBUG nova.compute.manager [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Refreshing instance network info cache due to event network-changed-81178767-32f1-432a-b245-c632c7753243. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 713.081862] env[62503]: DEBUG oslo_concurrency.lockutils [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] Acquiring lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.081994] env[62503]: DEBUG oslo_concurrency.lockutils [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] Acquired lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.082197] env[62503]: DEBUG nova.network.neutron [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Refreshing network info cache for port 81178767-32f1-432a-b245-c632c7753243 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.159325] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Successfully created port: 80bd40c8-505a-4b91-a56d-00f1135ef5cf {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.283409] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 713.544702] env[62503]: DEBUG nova.network.neutron [-] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.605543] env[62503]: DEBUG nova.network.neutron [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.654195] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758e8db4-c512-4413-96b3-f9aa74b358c2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.667108] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4aa554-14f3-46ce-bb65-3c677033d1bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.704860] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdc472b-f9b0-4290-bc0f-526e35944656 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.713033] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3bed88-0514-4867-9c9b-ebe5df352c88 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.730784] env[62503]: DEBUG nova.compute.provider_tree [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.773153] env[62503]: DEBUG nova.network.neutron [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.048665] env[62503]: INFO nova.compute.manager [-] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Took 1.02 seconds to deallocate network for instance. 
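
The PortBindingFailed tracebacks above all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure(port) (line 294 in this tree). As a rough, illustrative sketch only (not the verbatim Nova source), the check amounts to: Neutron returns the updated port with a 'binding:vif_type' attribute, and if that attribute comes back as 'binding_failed' Nova raises PortBindingFailed for that port id, which is the "Binding failed for port ..." message seen in these entries.

    # Illustrative sketch only -- not the verbatim Nova source. Based on the
    # call chain visible in the tracebacks above (nova/network/neutron.py:
    # _update_port -> _ensure_no_port_binding_failure).

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Neutron reports the outcome of port binding in 'binding:vif_type'.
        # A value of 'binding_failed' means no mechanism driver could bind
        # the port on the target host, so the VIF can never be plugged and
        # the instance build has to be aborted.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
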
[ 714.051228] env[62503]: DEBUG nova.compute.claims [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 714.051351] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.235123] env[62503]: DEBUG nova.scheduler.client.report [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 714.239121] env[62503]: ERROR nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. 
[ 714.239121] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 714.239121] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.239121] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.239121] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.239121] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.239121] env[62503]: ERROR nova.compute.manager raise self.value [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.239121] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 714.239121] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.239121] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 714.239578] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.239578] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 714.239578] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. 
[ 714.239578] env[62503]: ERROR nova.compute.manager [ 714.239578] env[62503]: Traceback (most recent call last): [ 714.239578] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 714.239578] env[62503]: listener.cb(fileno) [ 714.239578] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.239578] env[62503]: result = function(*args, **kwargs) [ 714.239578] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 714.239578] env[62503]: return func(*args, **kwargs) [ 714.239578] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 714.239578] env[62503]: raise e [ 714.239578] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 714.239578] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 714.239578] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.239578] env[62503]: created_port_ids = self._update_ports_for_instance( [ 714.239578] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.239578] env[62503]: with excutils.save_and_reraise_exception(): [ 714.239578] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.239578] env[62503]: self.force_reraise() [ 714.239578] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.239578] env[62503]: raise self.value [ 714.239578] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.239578] env[62503]: updated_port = self._update_port( [ 714.239578] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.239578] env[62503]: _ensure_no_port_binding_failure(port) [ 714.239578] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.239578] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 714.240885] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. [ 714.240885] env[62503]: Removing descriptor: 14 [ 714.275463] env[62503]: DEBUG oslo_concurrency.lockutils [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] Releasing lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.275741] env[62503]: DEBUG nova.compute.manager [req-a2844889-4b10-48d7-896c-e6b8755ee2a7 req-c31ffbbc-6b8a-4c12-a77e-60f74828bc65 service nova] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Received event network-vif-deleted-81178767-32f1-432a-b245-c632c7753243 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 714.294762] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 714.338482] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.338946] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 714.339306] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.339726] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 714.340063] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.340395] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 714.340848] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 714.341208] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 714.341572] env[62503]: DEBUG 
nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 714.341920] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 714.342326] env[62503]: DEBUG nova.virt.hardware [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 714.343733] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e0b362-7eb5-4ec0-a871-ad62e72efcb5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.355705] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3865fd8f-a93d-4b9f-b821-33876bb1b3ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.379776] env[62503]: ERROR nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. 
[ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Traceback (most recent call last): [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] yield resources [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.driver.spawn(context, instance, image_meta, [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] vm_ref = self.build_virtual_machine(instance, [ 714.379776] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] for vif in network_info: [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return self._sync_wrapper(fn, *args, **kwargs) [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.wait() [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self[:] = self._gt.wait() [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return self._exit_event.wait() [ 714.380128] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 714.380128] env[62503]: ERROR 
nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] current.throw(*self._exc) [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] result = function(*args, **kwargs) [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return func(*args, **kwargs) [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise e [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] nwinfo = self.network_api.allocate_for_instance( [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] created_port_ids = self._update_ports_for_instance( [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] with excutils.save_and_reraise_exception(): [ 714.380485] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.force_reraise() [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise self.value [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] updated_port = self._update_port( [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] _ensure_no_port_binding_failure(port) [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise exception.PortBindingFailed(port_id=port['id']) [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. [ 714.380828] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] [ 714.381259] env[62503]: INFO nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Terminating instance [ 714.384409] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquiring lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.384779] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquired lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.385144] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.742056] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.742428] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 714.745147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.700s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.746655] env[62503]: INFO nova.compute.claims [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.907300] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.007386] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.106468] env[62503]: DEBUG nova.compute.manager [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Received event network-changed-80bd40c8-505a-4b91-a56d-00f1135ef5cf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 715.106715] env[62503]: DEBUG nova.compute.manager [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Refreshing instance network info cache due to event network-changed-80bd40c8-505a-4b91-a56d-00f1135ef5cf. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 715.106863] env[62503]: DEBUG oslo_concurrency.lockutils [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] Acquiring lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.251331] env[62503]: DEBUG nova.compute.utils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.255026] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 715.255155] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 715.301763] env[62503]: DEBUG nova.policy [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '410af55086a5487ea0e284705060593d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '622a848b6d634ad09e3a25a19ebb4916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 715.510518] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Releasing lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.510989] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 715.511254] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.511609] env[62503]: DEBUG oslo_concurrency.lockutils [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] Acquired lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.511807] env[62503]: DEBUG nova.network.neutron [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Refreshing network info cache for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.512886] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-043c5ddb-96c4-492b-b618-e256dc907f3d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.522006] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb54e11-ecc1-422b-a697-7d2cb9e5cba3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.547802] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce8d9b01-e99d-4051-bd96-659692a436da could not be found. [ 715.548038] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.548363] env[62503]: INFO nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Took 0.04 seconds to destroy the instance on the hypervisor. [ 715.548662] env[62503]: DEBUG oslo.service.loopingcall [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.548888] env[62503]: DEBUG nova.compute.manager [-] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 715.548982] env[62503]: DEBUG nova.network.neutron [-] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.568363] env[62503]: DEBUG nova.network.neutron [-] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.607040] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Successfully created port: 75a6eeb4-800f-4551-bad8-cad36b81a1c2 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.755938] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 716.034035] env[62503]: DEBUG nova.network.neutron [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.070825] env[62503]: DEBUG nova.network.neutron [-] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.117873] env[62503]: DEBUG nova.network.neutron [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.156667] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c4c9cf-45c8-4e26-b249-4bdd62276a86 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.164867] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbac958-faf8-40bc-90f4-a1c50d552346 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.194935] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be27bffa-e6bc-4925-a33a-7eaad70a5d12 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.202106] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf43f1c-440a-4f44-b6ec-559d0ff77c1e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.214979] env[62503]: DEBUG nova.compute.provider_tree [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.574778] env[62503]: INFO nova.compute.manager [-] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Took 1.03 seconds to deallocate network for instance. [ 716.577527] env[62503]: DEBUG nova.compute.claims [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 716.577709] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.586031] env[62503]: ERROR nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. 
[ 716.586031] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 716.586031] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.586031] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.586031] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.586031] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.586031] env[62503]: ERROR nova.compute.manager raise self.value [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.586031] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 716.586031] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.586031] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 716.586495] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.586495] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 716.586495] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. 
[ 716.586495] env[62503]: ERROR nova.compute.manager [ 716.586495] env[62503]: Traceback (most recent call last): [ 716.586495] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 716.586495] env[62503]: listener.cb(fileno) [ 716.586495] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.586495] env[62503]: result = function(*args, **kwargs) [ 716.586495] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.586495] env[62503]: return func(*args, **kwargs) [ 716.586495] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 716.586495] env[62503]: raise e [ 716.586495] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 716.586495] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 716.586495] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.586495] env[62503]: created_port_ids = self._update_ports_for_instance( [ 716.586495] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.586495] env[62503]: with excutils.save_and_reraise_exception(): [ 716.586495] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.586495] env[62503]: self.force_reraise() [ 716.586495] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.586495] env[62503]: raise self.value [ 716.586495] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.586495] env[62503]: updated_port = self._update_port( [ 716.586495] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.586495] env[62503]: _ensure_no_port_binding_failure(port) [ 716.586495] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.586495] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 716.587253] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. 
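Editor's note: both tracebacks above end in `_ensure_no_port_binding_failure` (nova/network/neutron.py:294) raising `PortBindingFailed` for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2. As a rough illustration of the guard those frames point at — a hedged sketch, not a copy of Nova's code; the `'binding:vif_type' == 'binding_failed'` marker and the local exception stub are assumptions, since the log only shows the raised exception — the check reduces to:

```python
# Minimal sketch: if Neutron reports the port's binding as failed, raise
# PortBindingFailed so the build is aborted and the instance re-scheduled.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port: dict) -> None:
    # Assumed marker for this illustration; the log does not show the field.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Mirrors the failure seen in this log:
# ensure_no_port_binding_failure(
#     {'id': '75a6eeb4-800f-4551-bad8-cad36b81a1c2',
#      'binding:vif_type': 'binding_failed'})
```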
[ 716.587253] env[62503]: Removing descriptor: 14 [ 716.621972] env[62503]: DEBUG oslo_concurrency.lockutils [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] Releasing lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.622255] env[62503]: DEBUG nova.compute.manager [req-87335252-77cb-4254-8118-5cc18833ef00 req-655dec6f-a7d0-4e7f-90d5-171cf1c13f47 service nova] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Received event network-vif-deleted-80bd40c8-505a-4b91-a56d-00f1135ef5cf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 716.717715] env[62503]: DEBUG nova.scheduler.client.report [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 716.771030] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 716.797554] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 716.797825] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 716.798009] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.798343] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 716.798412] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.798526] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 716.798726] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 716.798881] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 716.799064] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 716.799254] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 716.799433] env[62503]: DEBUG nova.virt.hardware [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 716.800609] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac82fae-5730-4d79-b0ed-e74ed97d65f3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.808217] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea5497b-9681-45ef-898f-28207e8501a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.821477] env[62503]: ERROR nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. 
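Editor's note: the hardware.py entries just above walk through topology selection for the 1-vCPU m1.nano flavor: no flavor or image preference (0:0:0), default 65536 limits, hence a single possible topology of 1 socket x 1 core x 1 thread. A toy enumeration along those lines — a simplified stand-in under those assumptions, not Nova's `_get_possible_cpu_topologies` — would be:

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """List (sockets, cores, threads) triples whose product equals vcpus.

    Simplified illustration of the selection logged above; Nova applies
    additional preference and ordering rules that are omitted here.
    """
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topos = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the log for m1.nano
```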
[ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] Traceback (most recent call last): [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] yield resources [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.driver.spawn(context, instance, image_meta, [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self._vmops.spawn(context, instance, image_meta, injected_files, [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] vm_ref = self.build_virtual_machine(instance, [ 716.821477] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] vif_infos = vmwarevif.get_vif_info(self._session, [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] for vif in network_info: [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return self._sync_wrapper(fn, *args, **kwargs) [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.wait() [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self[:] = self._gt.wait() [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return self._exit_event.wait() [ 716.821833] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 716.821833] env[62503]: ERROR 
nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] current.throw(*self._exc) [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] result = function(*args, **kwargs) [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return func(*args, **kwargs) [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise e [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] nwinfo = self.network_api.allocate_for_instance( [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] created_port_ids = self._update_ports_for_instance( [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] with excutils.save_and_reraise_exception(): [ 716.822169] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.force_reraise() [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise self.value [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] updated_port = self._update_port( [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] _ensure_no_port_binding_failure(port) [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise exception.PortBindingFailed(port_id=port['id']) [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. [ 716.822499] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] [ 716.822499] env[62503]: INFO nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Terminating instance [ 716.823853] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.824029] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquired lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.824201] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.138773] env[62503]: DEBUG nova.compute.manager [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Received event network-changed-75a6eeb4-800f-4551-bad8-cad36b81a1c2 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 717.139055] env[62503]: DEBUG nova.compute.manager [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Refreshing instance network info cache due to event network-changed-75a6eeb4-800f-4551-bad8-cad36b81a1c2. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 717.139463] env[62503]: DEBUG oslo_concurrency.lockutils [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] Acquiring lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.224023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.224023] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 717.226520] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.468s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.342807] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.425355] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.731209] env[62503]: DEBUG nova.compute.utils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 717.736022] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 717.736022] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 717.775492] env[62503]: DEBUG nova.policy [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7f5c7a4ae254cd5a4d50b88dfb1ee03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2eee907e87ae476fa672848fea290bfd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 717.928549] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Releasing lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.929572] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 717.929572] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 717.929763] env[62503]: DEBUG oslo_concurrency.lockutils [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] Acquired lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.930264] env[62503]: DEBUG nova.network.neutron [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Refreshing network info cache for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.931555] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6b8c1a0-82eb-4156-8149-348157df0e58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.946079] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff54f9e7-c426-4914-9536-b64dad32cd92 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.971967] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3178d5cd-1937-422b-9287-970d095aa452 could not be found. [ 717.972222] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.972404] env[62503]: INFO nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Took 0.04 seconds to destroy the instance on the hypervisor. [ 717.972642] env[62503]: DEBUG oslo.service.loopingcall [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.974978] env[62503]: DEBUG nova.compute.manager [-] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 717.975095] env[62503]: DEBUG nova.network.neutron [-] [instance: 3178d5cd-1937-422b-9287-970d095aa452] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 717.993911] env[62503]: DEBUG nova.network.neutron [-] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.086831] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb19aed-fdd5-4548-a359-250f855f7430 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.090120] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Successfully created port: 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.096509] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71111f02-3d71-49f3-a4f6-cff4dac34719 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.125998] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b850088-e8e9-4e98-9376-c723ea96c169 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.133578] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c536755e-d0d0-475c-be96-e22c8a722c7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.146425] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.237036] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 718.460199] env[62503]: DEBUG nova.network.neutron [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.497340] env[62503]: DEBUG nova.network.neutron [-] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.559062] env[62503]: DEBUG nova.network.neutron [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.649655] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 718.741491] env[62503]: INFO nova.virt.block_device [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Booting with volume 0b1120cf-beb6-43ce-82ea-76850b9fed7d at /dev/sda [ 718.784246] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ee5e08a-5289-49e0-a037-60b01051437a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.793851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0432d61-dd1d-4ef8-a295-302ff1e8d3ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.818235] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b3df7d5-84f0-4a6d-961d-a4e122ef1d0e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.825582] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d17ab85-4e82-4d25-93d4-58874e47bf7c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.847281] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a5355c-13be-492a-8db3-855888be4927 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.853996] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac07884b-ecfb-4883-8c78-c342714bff0f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.869889] env[62503]: DEBUG nova.virt.block_device [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 
tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating existing volume attachment record: 330f90f5-b7de-4f0a-9fb2-9d59fca3b86b {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 718.999890] env[62503]: INFO nova.compute.manager [-] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Took 1.02 seconds to deallocate network for instance. [ 719.001879] env[62503]: DEBUG nova.compute.claims [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 719.002070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.061972] env[62503]: DEBUG oslo_concurrency.lockutils [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] Releasing lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.063282] env[62503]: DEBUG nova.compute.manager [req-ea2c1dd5-e916-40bb-9ecb-992457ad0aa7 req-65a30a90-82e7-4f1c-96fd-18722db89f9d service nova] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Received event network-vif-deleted-75a6eeb4-800f-4551-bad8-cad36b81a1c2 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 719.154560] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.928s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.155283] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. 
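Editor's note: the inventory reported above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 (at 718.649655) implies the schedulable capacity via the usual placement formula, capacity = (total - reserved) * allocation_ratio. A quick check of the figures in this log — an illustration of the arithmetic, not output of the service:

```python
# Capacity implied by the inventory logged at 718.649655, using
# (total - reserved) * allocation_ratio for each resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```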
[ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Traceback (most recent call last): [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.driver.spawn(context, instance, image_meta, [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] vm_ref = self.build_virtual_machine(instance, [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.155283] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] for vif in network_info: [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self._sync_wrapper(fn, *args, **kwargs) [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.wait() [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self[:] = self._gt.wait() [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self._exit_event.wait() [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] result = hub.switch() [ 719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
719.155688] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return self.greenlet.switch() [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] result = function(*args, **kwargs) [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] return func(*args, **kwargs) [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise e [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] nwinfo = self.network_api.allocate_for_instance( [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] created_port_ids = self._update_ports_for_instance( [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] with excutils.save_and_reraise_exception(): [ 719.156087] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] self.force_reraise() [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise self.value [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] updated_port = self._update_port( [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] _ensure_no_port_binding_failure(port) [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] raise exception.PortBindingFailed(port_id=port['id']) [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] nova.exception.PortBindingFailed: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. [ 719.156526] env[62503]: ERROR nova.compute.manager [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] [ 719.156855] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 719.158020] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.350s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.158779] env[62503]: INFO nova.compute.claims [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.161361] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Build of instance f244a7c9-2f39-4f91-aeba-e5f36e7f79ef was re-scheduled: Binding failed for port bf8b4134-287d-4636-923d-ae78e2eb3f0e, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 719.161794] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 719.162029] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.162180] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.162342] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.175725] env[62503]: DEBUG nova.compute.manager [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Received event network-changed-96ba244c-f6e2-4f7d-b56a-dd29b0d7721b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 719.175933] env[62503]: DEBUG nova.compute.manager [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Refreshing instance network info cache due to event network-changed-96ba244c-f6e2-4f7d-b56a-dd29b0d7721b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 719.176205] env[62503]: DEBUG oslo_concurrency.lockutils [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] Acquiring lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.176981] env[62503]: DEBUG oslo_concurrency.lockutils [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] Acquired lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.176981] env[62503]: DEBUG nova.network.neutron [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Refreshing network info cache for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.229282] env[62503]: ERROR nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 719.229282] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 719.229282] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.229282] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.229282] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.229282] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.229282] env[62503]: ERROR nova.compute.manager raise self.value [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.229282] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 719.229282] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.229282] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 719.229868] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.229868] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 719.229868] env[62503]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 719.229868] env[62503]: ERROR nova.compute.manager [ 719.229868] env[62503]: Traceback (most recent call last): [ 719.229868] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 719.229868] env[62503]: listener.cb(fileno) [ 719.229868] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.229868] env[62503]: result = function(*args, **kwargs) [ 719.229868] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.229868] env[62503]: return func(*args, **kwargs) [ 719.229868] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 719.229868] env[62503]: raise e [ 719.229868] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 719.229868] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 719.229868] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.229868] env[62503]: created_port_ids = self._update_ports_for_instance( [ 719.229868] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.229868] env[62503]: with excutils.save_and_reraise_exception(): [ 719.229868] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.229868] env[62503]: self.force_reraise() [ 719.229868] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.229868] env[62503]: raise self.value [ 719.229868] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.229868] env[62503]: updated_port = self._update_port( [ 719.229868] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.229868] env[62503]: _ensure_no_port_binding_failure(port) [ 719.229868] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.229868] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 719.230711] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 719.230711] env[62503]: Removing descriptor: 14 [ 719.687721] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.694711] env[62503]: DEBUG nova.network.neutron [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.786649] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.801054] env[62503]: DEBUG nova.network.neutron [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.288731] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.289063] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 720.289167] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 720.289407] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.303512] env[62503]: DEBUG oslo_concurrency.lockutils [req-eb3e157e-1d4b-4c3e-89be-ba07c21bb887 req-dd157082-cfb0-4e29-a927-0bd3ebec11dd service nova] Releasing lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.304108] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.470890] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2814874-4b62-4f7b-be42-182cd9a2f867 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.479848] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f7625b-39b6-467a-8aa9-5bb43f667ab0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.510618] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e08b57-6e61-4c5e-ba9a-0d507afd521a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.518251] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96ea123-4b06-4f21-a4e2-ec9ea4a45839 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.531336] env[62503]: DEBUG nova.compute.provider_tree [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.808881] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.972901] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 720.972901] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.973850] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.973850] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.973850] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.973850] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.973850] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.974199] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.974199] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.974280] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Got 1 possible 
topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.974503] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.974650] env[62503]: DEBUG nova.virt.hardware [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.975649] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c8f9db-297b-4979-a3a9-5558c46b676c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.985274] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd7b327-87e5-4352-8e60-d839a23d8958 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.000197] env[62503]: ERROR nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. 
[ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Traceback (most recent call last): [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] yield resources [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.driver.spawn(context, instance, image_meta, [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] vm_ref = self.build_virtual_machine(instance, [ 721.000197] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] for vif in network_info: [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return self._sync_wrapper(fn, *args, **kwargs) [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.wait() [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self[:] = self._gt.wait() [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return self._exit_event.wait() [ 721.000574] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.000574] env[62503]: ERROR 
nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] current.throw(*self._exc) [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] result = function(*args, **kwargs) [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return func(*args, **kwargs) [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise e [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] nwinfo = self.network_api.allocate_for_instance( [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] created_port_ids = self._update_ports_for_instance( [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] with excutils.save_and_reraise_exception(): [ 721.000902] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.force_reraise() [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise self.value [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] updated_port = self._update_port( [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] _ensure_no_port_binding_failure(port) [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise exception.PortBindingFailed(port_id=port['id']) [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 721.001257] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] [ 721.001257] env[62503]: INFO nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Terminating instance [ 721.002741] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquiring lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.002912] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquired lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.003344] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.034407] env[62503]: DEBUG nova.scheduler.client.report [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 721.204398] env[62503]: DEBUG nova.compute.manager [req-84f15ec5-2638-4ba7-9337-d617b66542e8 req-8b46f947-9803-4c86-8682-8db9a231e169 service nova] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Received event network-vif-deleted-96ba244c-f6e2-4f7d-b56a-dd29b0d7721b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 721.312030] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: f244a7c9-2f39-4f91-aeba-e5f36e7f79ef] Took 1.02 seconds to deallocate network for instance. 
[ 721.519101] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.540887] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.541413] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 721.544033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.039s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.545316] env[62503]: INFO nova.compute.claims [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.590222] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.052047] env[62503]: DEBUG nova.compute.utils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.054387] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 722.054527] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.092710] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Releasing lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.093298] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 722.093709] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44609e07-4907-4f3a-a330-5c3ad6bc22d8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.102328] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96176aca-8ac8-4322-b308-eb1effb37449 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.113457] env[62503]: DEBUG nova.policy [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e16287e7f4114e1b9278fa463f7a1c3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c65ccbbe7774e7994830d31cdbb91ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.126803] env[62503]: WARNING nova.virt.vmwareapi.driver [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 97ac40d6-1c29-4282-86e5-be27a20cf5e0 could not be found. 
[ 722.127036] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.127321] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f44b4349-68eb-438a-b5d1-c1e9aa9823d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.134588] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3701103a-2042-4152-afd2-5cdff074fc69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.155852] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97ac40d6-1c29-4282-86e5-be27a20cf5e0 could not be found. [ 722.155852] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.155852] env[62503]: INFO nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Took 0.06 seconds to destroy the instance on the hypervisor. [ 722.155852] env[62503]: DEBUG oslo.service.loopingcall [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 722.156011] env[62503]: DEBUG nova.compute.manager [-] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 722.156011] env[62503]: DEBUG nova.network.neutron [-] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.179521] env[62503]: DEBUG nova.network.neutron [-] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.357352] env[62503]: INFO nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Deleted allocations for instance f244a7c9-2f39-4f91-aeba-e5f36e7f79ef [ 722.437237] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Successfully created port: b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.557467] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 722.684524] env[62503]: DEBUG nova.network.neutron [-] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.868965] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "f244a7c9-2f39-4f91-aeba-e5f36e7f79ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.332s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.887792] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01d2684-474c-4a06-a52f-c20bab542f49 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.896290] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e9e2f8-80a1-4a84-bc3f-d99e44b8e200 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.931030] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb4845e-7f59-4c9f-a24b-ef71edc19ef2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.938071] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efebe7cd-e466-450a-a18d-cd09e43d5f9a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.951486] env[62503]: DEBUG nova.compute.provider_tree [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.188283] env[62503]: INFO nova.compute.manager [-] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Took 1.03 seconds to deallocate network for instance. 
[ 723.214214] env[62503]: DEBUG nova.compute.manager [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Received event network-changed-b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 723.214413] env[62503]: DEBUG nova.compute.manager [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Refreshing instance network info cache due to event network-changed-b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 723.214629] env[62503]: DEBUG oslo_concurrency.lockutils [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] Acquiring lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.214767] env[62503]: DEBUG oslo_concurrency.lockutils [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] Acquired lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.214989] env[62503]: DEBUG nova.network.neutron [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Refreshing network info cache for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.371806] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 723.454726] env[62503]: DEBUG nova.scheduler.client.report [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 723.568182] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 723.594609] env[62503]: ERROR nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. [ 723.594609] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 723.594609] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.594609] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.594609] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.594609] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.594609] env[62503]: ERROR nova.compute.manager raise self.value [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.594609] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 723.594609] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.594609] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 723.595110] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.595110] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 723.595110] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. 
[ 723.595110] env[62503]: ERROR nova.compute.manager [ 723.595110] env[62503]: Traceback (most recent call last): [ 723.595110] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 723.595110] env[62503]: listener.cb(fileno) [ 723.595110] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.595110] env[62503]: result = function(*args, **kwargs) [ 723.595110] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 723.595110] env[62503]: return func(*args, **kwargs) [ 723.595110] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 723.595110] env[62503]: raise e [ 723.595110] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 723.595110] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 723.595110] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.595110] env[62503]: created_port_ids = self._update_ports_for_instance( [ 723.595110] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.595110] env[62503]: with excutils.save_and_reraise_exception(): [ 723.595110] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.595110] env[62503]: self.force_reraise() [ 723.595110] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.595110] env[62503]: raise self.value [ 723.595110] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.595110] env[62503]: updated_port = self._update_port( [ 723.595110] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.595110] env[62503]: _ensure_no_port_binding_failure(port) [ 723.595110] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.595110] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 723.596023] env[62503]: nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. 
[ 723.596023] env[62503]: Removing descriptor: 16 [ 723.596900] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 723.597136] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 723.597292] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.597473] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 723.597614] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.597761] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 723.597963] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 723.598131] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 723.598358] env[62503]: DEBUG nova.virt.hardware [None 
req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 723.598538] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 723.598786] env[62503]: DEBUG nova.virt.hardware [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.599931] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cebfe52-b93a-42d1-acb2-97272e99650e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.607825] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a61c62f-fa60-4c19-b82f-752b442cef40 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.621167] env[62503]: ERROR nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. 
[ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Traceback (most recent call last): [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] yield resources [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.driver.spawn(context, instance, image_meta, [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] vm_ref = self.build_virtual_machine(instance, [ 723.621167] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] for vif in network_info: [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return self._sync_wrapper(fn, *args, **kwargs) [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.wait() [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self[:] = self._gt.wait() [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return self._exit_event.wait() [ 723.621547] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 723.621547] env[62503]: ERROR 
nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] current.throw(*self._exc) [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] result = function(*args, **kwargs) [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return func(*args, **kwargs) [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise e [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] nwinfo = self.network_api.allocate_for_instance( [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] created_port_ids = self._update_ports_for_instance( [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] with excutils.save_and_reraise_exception(): [ 723.621919] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.force_reraise() [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise self.value [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] updated_port = self._update_port( [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] _ensure_no_port_binding_failure(port) [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise exception.PortBindingFailed(port_id=port['id']) [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. [ 723.622320] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] [ 723.622320] env[62503]: INFO nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Terminating instance [ 723.623449] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.731608] env[62503]: DEBUG nova.network.neutron [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.746403] env[62503]: INFO nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Took 0.56 seconds to detach 1 volumes for instance. 
[ 723.747520] env[62503]: DEBUG nova.compute.claims [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 723.747748] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.826254] env[62503]: DEBUG nova.network.neutron [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.893167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.959686] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.960135] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 723.962931] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.849s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.328603] env[62503]: DEBUG oslo_concurrency.lockutils [req-2b4da0ec-7099-4470-8af5-51446b1272e8 req-8c2fd23b-11f1-48f5-875e-3e4540cdb50f service nova] Releasing lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.329038] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquired lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.329235] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.467666] env[62503]: DEBUG nova.compute.utils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.471943] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 724.472332] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.519532] env[62503]: DEBUG nova.policy [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b072e4c8ef94b26895d59ede518aaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0849093c8b48400a8e9d56171ea99e8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 724.786395] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea71c118-ea1d-4ef9-96ac-427ab87f3ff4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.789879] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Successfully created port: 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.797686] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af71f4b5-322a-477c-a2b7-c2b8092b9fba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.830282] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e41fa4-51a9-4a63-b76a-7c3320169fae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.840786] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b51471-9b78-40a6-bdcb-d38bc3de7182 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.856737] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.858982] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.950918] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.974626] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 725.246600] env[62503]: DEBUG nova.compute.manager [req-8cdb46b0-4648-4d61-ba79-fa1a5726ee18 req-231c64b2-549b-4088-9677-37ad5a9412db service nova] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Received event network-vif-deleted-b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 725.362292] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 725.453715] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Releasing lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.454233] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 725.454357] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.454654] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97c049b6-5586-4010-bbd4-9023ce6580e9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.463206] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baff3aa-b752-482c-a203-781f923a613f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.486010] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b40f385-db0a-460c-b7fd-47e4d6afbaf9 could not be found. [ 725.486287] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 725.486435] env[62503]: INFO nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 725.486674] env[62503]: DEBUG oslo.service.loopingcall [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.487109] env[62503]: DEBUG nova.compute.manager [-] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 725.487204] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.509083] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.783456] env[62503]: ERROR nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. 
[ 725.783456] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 725.783456] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.783456] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.783456] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.783456] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.783456] env[62503]: ERROR nova.compute.manager raise self.value [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.783456] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 725.783456] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.783456] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 725.784058] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.784058] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 725.784058] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. 
[ 725.784058] env[62503]: ERROR nova.compute.manager [ 725.784058] env[62503]: Traceback (most recent call last): [ 725.784058] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 725.784058] env[62503]: listener.cb(fileno) [ 725.784058] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.784058] env[62503]: result = function(*args, **kwargs) [ 725.784058] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.784058] env[62503]: return func(*args, **kwargs) [ 725.784058] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 725.784058] env[62503]: raise e [ 725.784058] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 725.784058] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 725.784058] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.784058] env[62503]: created_port_ids = self._update_ports_for_instance( [ 725.784058] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.784058] env[62503]: with excutils.save_and_reraise_exception(): [ 725.784058] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.784058] env[62503]: self.force_reraise() [ 725.784058] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.784058] env[62503]: raise self.value [ 725.784058] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.784058] env[62503]: updated_port = self._update_port( [ 725.784058] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.784058] env[62503]: _ensure_no_port_binding_failure(port) [ 725.784058] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.784058] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 725.784905] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. [ 725.784905] env[62503]: Removing descriptor: 16 [ 725.868047] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.905s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.868760] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. 
[ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Traceback (most recent call last): [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.driver.spawn(context, instance, image_meta, [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] vm_ref = self.build_virtual_machine(instance, [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.868760] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] for vif in network_info: [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self._sync_wrapper(fn, *args, **kwargs) [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.wait() [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self[:] = self._gt.wait() [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self._exit_event.wait() [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] result = hub.switch() [ 725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
725.869167] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return self.greenlet.switch() [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] result = function(*args, **kwargs) [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] return func(*args, **kwargs) [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise e [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] nwinfo = self.network_api.allocate_for_instance( [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] created_port_ids = self._update_ports_for_instance( [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] with excutils.save_and_reraise_exception(): [ 725.869550] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] self.force_reraise() [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise self.value [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] updated_port = self._update_port( [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] _ensure_no_port_binding_failure(port) [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] raise exception.PortBindingFailed(port_id=port['id']) [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] nova.exception.PortBindingFailed: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. [ 725.869934] env[62503]: ERROR nova.compute.manager [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] [ 725.870259] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 725.870592] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.709s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.873426] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Build of instance 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9 was re-scheduled: Binding failed for port ef09a46d-90a7-42a1-b67d-54eeef8f5b29, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 725.873847] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 725.874080] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.874227] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.874519] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.988474] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 726.011374] env[62503]: DEBUG nova.network.neutron [-] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.016357] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 726.016678] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 726.016854] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.017054] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 726.017203] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.017347] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 726.017550] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 726.017707] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 
tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 726.017868] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 726.018032] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 726.018206] env[62503]: DEBUG nova.virt.hardware [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 726.019167] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435eb271-23fd-4f8e-b991-0a09cb34a818 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.028233] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ed6420-d280-4ec8-ac1e-802dcfe4cd0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.042920] env[62503]: ERROR nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. 
[ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Traceback (most recent call last): [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] yield resources [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.driver.spawn(context, instance, image_meta, [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] vm_ref = self.build_virtual_machine(instance, [ 726.042920] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] for vif in network_info: [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return self._sync_wrapper(fn, *args, **kwargs) [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.wait() [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self[:] = self._gt.wait() [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return self._exit_event.wait() [ 726.043406] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 726.043406] env[62503]: ERROR 
nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] current.throw(*self._exc) [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] result = function(*args, **kwargs) [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return func(*args, **kwargs) [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise e [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] nwinfo = self.network_api.allocate_for_instance( [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] created_port_ids = self._update_ports_for_instance( [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] with excutils.save_and_reraise_exception(): [ 726.043824] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.force_reraise() [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise self.value [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] updated_port = self._update_port( [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] _ensure_no_port_binding_failure(port) [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise exception.PortBindingFailed(port_id=port['id']) [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. [ 726.044216] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] [ 726.044216] env[62503]: INFO nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Terminating instance [ 726.046151] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.046151] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.046151] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.391713] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.477698] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.523056] env[62503]: INFO nova.compute.manager [-] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Took 1.04 seconds to deallocate network for instance. 
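The claim and abort lines around this point ('Acquiring lock "compute_resources" by ...', 'Lock "compute_resources" "released" ... held 1.905s') are emitted by oslo.concurrency's lockutils wrapper around the resource tracker. The following is a rough sketch of the usage pattern that produces those DEBUG lines, using the public lockutils API with an illustrative function body; only the lock name "compute_resources" is taken from the log.

# Sketch of the named-lock pattern behind the "Acquiring lock / acquired /
# released ... held N.NNNs" DEBUG messages.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs while holding the in-process named lock; lockutils' inner()
    # wrapper logs how long the caller waited for and then held the lock.
    print('aborting claim for %s' % instance_uuid)

# Equivalent context-manager form:
with lockutils.lock('compute_resources'):
    pass

abort_instance_claim('0b40f385-db0a-460c-b7fd-47e4d6afbaf9')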
[ 726.525212] env[62503]: DEBUG nova.compute.claims [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 726.525426] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.564945] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.647166] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.672376] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a85937-fc6e-4d1b-8ef9-dde3b0365a97 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.680988] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a8d7b9-5530-4c14-b88d-6cd38309a9b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.710217] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682ae349-3099-4592-93f9-a547532ab1b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.717190] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c67989-5dea-4059-9701-6eb4543d76d8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.731368] env[62503]: DEBUG nova.compute.provider_tree [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.981069] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.981069] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 
tempest-ListServersNegativeTestJSON-1720270648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 726.981069] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 726.981069] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.000434] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.149979] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.150448] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 727.150638] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.150935] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ffede98-07fa-4266-a6fd-9a705d1e884e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.159775] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62282bb-7f6f-4c89-86f9-0d82e8fede76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.182173] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1693c1f-6497-429c-a7f7-5bf5591684d1 could not be found. 
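The warning just above ("Instance does not exist on backend", followed immediately by "Instance destroyed" and a 0.03 second teardown) shows the vmwareapi destroy path tolerating VMs that were never created on vCenter because the build failed before spawn completed. A hedged sketch of that behaviour is below; InstanceNotFound, destroy and lookup_vm are illustrative stand-ins, not Nova's actual vmops code, and in the log the lookup corresponds to the SearchIndex.FindAllByUuid call.

# Sketch of the "missing VM counts as already destroyed" behaviour seen above.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    pass

def destroy(instance_uuid, lookup_vm):
    try:
        vm_ref = lookup_vm(instance_uuid)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed")
        return
    # ... power off and unregister vm_ref on the hypervisor here ...
    LOG.debug("Instance destroyed")

def _missing(uuid):
    raise InstanceNotFound(uuid)

destroy('c1693c1f-6497-429c-a7f7-5bf5591684d1', _missing)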
[ 727.182344] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.182591] env[62503]: INFO nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 727.182897] env[62503]: DEBUG oslo.service.loopingcall [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.183198] env[62503]: DEBUG nova.compute.manager [-] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 727.183391] env[62503]: DEBUG nova.network.neutron [-] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.196625] env[62503]: DEBUG nova.network.neutron [-] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.234487] env[62503]: DEBUG nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 727.285476] env[62503]: DEBUG nova.compute.manager [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Received event network-changed-4e5bf34c-de80-4cd1-9d3f-8a7735e994a3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 727.285476] env[62503]: DEBUG nova.compute.manager [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Refreshing instance network info cache due to event network-changed-4e5bf34c-de80-4cd1-9d3f-8a7735e994a3. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 727.285638] env[62503]: DEBUG oslo_concurrency.lockutils [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] Acquiring lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.285775] env[62503]: DEBUG oslo_concurrency.lockutils [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] Acquired lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.285936] env[62503]: DEBUG nova.network.neutron [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Refreshing network info cache for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.504643] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.699131] env[62503]: DEBUG nova.network.neutron [-] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.739487] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.740175] env[62503]: ERROR nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. 
[ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Traceback (most recent call last): [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.driver.spawn(context, instance, image_meta, [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] vm_ref = self.build_virtual_machine(instance, [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.740175] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] for vif in network_info: [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self._sync_wrapper(fn, *args, **kwargs) [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.wait() [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self[:] = self._gt.wait() [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self._exit_event.wait() [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] result = hub.switch() [ 727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
727.740555] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return self.greenlet.switch() [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] result = function(*args, **kwargs) [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] return func(*args, **kwargs) [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise e [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] nwinfo = self.network_api.allocate_for_instance( [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] created_port_ids = self._update_ports_for_instance( [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] with excutils.save_and_reraise_exception(): [ 727.740949] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] self.force_reraise() [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise self.value [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] updated_port = self._update_port( [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] _ensure_no_port_binding_failure(port) [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] raise exception.PortBindingFailed(port_id=port['id']) [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] nova.exception.PortBindingFailed: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. [ 727.741346] env[62503]: ERROR nova.compute.manager [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] [ 727.741678] env[62503]: DEBUG nova.compute.utils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.742021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.386s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.745081] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Build of instance df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36 was re-scheduled: Binding failed for port 9d3ab059-5e2e-4779-af52-242e031464dc, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 727.745425] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 727.745645] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquiring lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.745789] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Acquired lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.745940] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.804024] env[62503]: DEBUG nova.network.neutron [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.884224] env[62503]: DEBUG nova.network.neutron [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.008411] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9] Took 1.03 seconds to deallocate network for instance. [ 728.202218] env[62503]: INFO nova.compute.manager [-] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Took 1.02 seconds to deallocate network for instance. 
[ 728.205222] env[62503]: DEBUG nova.compute.claims [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 728.205403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.265730] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.362570] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.387520] env[62503]: DEBUG oslo_concurrency.lockutils [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] Releasing lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.387520] env[62503]: DEBUG nova.compute.manager [req-c063199c-f1db-413a-a215-1533492b28e5 req-2f63a654-4dd8-492c-aa11-d97e8b9b233d service nova] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Received event network-vif-deleted-4e5bf34c-de80-4cd1-9d3f-8a7735e994a3 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 728.556984] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d348a00-225d-47e8-a75c-618191047f82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.564877] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b93b52-f159-47b1-90f5-b6e0488b4f8e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.594238] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffdf789-09aa-4dcf-b131-6e467bfa50b4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.601714] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4c598f-c032-4c93-b9f8-dedc3ede3ec1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.614572] env[62503]: DEBUG nova.compute.provider_tree [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Inventory has not changed in 
ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.867681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Releasing lock "refresh_cache-df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.867932] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 728.868113] env[62503]: DEBUG nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 728.868281] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.882270] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.036204] env[62503]: INFO nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Deleted allocations for instance 0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9 [ 729.117905] env[62503]: DEBUG nova.scheduler.client.report [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 729.384619] env[62503]: DEBUG nova.network.neutron [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.544399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "0cdafe1b-c5b5-4db1-856c-dfb2fb5865a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.951s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.622990] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.623656] env[62503]: ERROR nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. 
[ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Traceback (most recent call last): [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.driver.spawn(context, instance, image_meta, [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] vm_ref = self.build_virtual_machine(instance, [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.623656] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] for vif in network_info: [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self._sync_wrapper(fn, *args, **kwargs) [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.wait() [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self[:] = self._gt.wait() [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self._exit_event.wait() [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] result = hub.switch() [ 729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
729.623974] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return self.greenlet.switch() [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] result = function(*args, **kwargs) [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] return func(*args, **kwargs) [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise e [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] nwinfo = self.network_api.allocate_for_instance( [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] created_port_ids = self._update_ports_for_instance( [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] with excutils.save_and_reraise_exception(): [ 729.624335] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] self.force_reraise() [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise self.value [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] updated_port = self._update_port( [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] _ensure_no_port_binding_failure(port) [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] raise exception.PortBindingFailed(port_id=port['id']) [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] nova.exception.PortBindingFailed: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. [ 729.624789] env[62503]: ERROR nova.compute.manager [instance: d4930731-7333-426c-a2fc-a732d351a0f0] [ 729.625122] env[62503]: DEBUG nova.compute.utils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 729.625543] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.574s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.628976] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Build of instance d4930731-7333-426c-a2fc-a732d351a0f0 was re-scheduled: Binding failed for port 4a362122-9a64-4fbc-91b9-dff850e54b53, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 729.629445] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 729.629686] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquiring lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.629833] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Acquired lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.630015] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.887715] env[62503]: INFO nova.compute.manager [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] [instance: df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36] Took 1.02 seconds to deallocate network for instance. [ 730.047546] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 730.150637] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.242914] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.426457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b308ad0-6656-431b-b79e-1e13a5a92c5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.434896] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ebd6ec-f471-44e4-98dd-1f3644bdbc85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.464679] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2249d997-b04f-4f31-848b-b0e632f220b0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.474025] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba740fab-4ed1-4118-8c66-770330f120f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.486537] env[62503]: DEBUG nova.compute.provider_tree [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.565801] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.745762] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Releasing lock "refresh_cache-d4930731-7333-426c-a2fc-a732d351a0f0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.745762] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 730.745899] env[62503]: DEBUG nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 730.747750] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.760672] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.916020] env[62503]: INFO nova.scheduler.client.report [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Deleted allocations for instance df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36 [ 730.989526] env[62503]: DEBUG nova.scheduler.client.report [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 731.263211] env[62503]: DEBUG nova.network.neutron [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.423505] env[62503]: DEBUG oslo_concurrency.lockutils [None req-28bdbbfe-b18e-4d2e-b04d-d0fda11d094c tempest-ListServersNegativeTestJSON-1720270648 tempest-ListServersNegativeTestJSON-1720270648-project-member] Lock "df6ad6f4-2da5-4cad-bb6a-3b7d9ff6bb36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.750s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.494031] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.868s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.494667] env[62503]: 
ERROR nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Traceback (most recent call last): [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.driver.spawn(context, instance, image_meta, [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] vm_ref = self.build_virtual_machine(instance, [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.494667] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] for vif in network_info: [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return self._sync_wrapper(fn, *args, **kwargs) [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.wait() [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self[:] = self._gt.wait() [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return self._exit_event.wait() [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] current.throw(*self._exc) [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.495069] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] result = function(*args, **kwargs) [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] return func(*args, **kwargs) [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise e [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] nwinfo = self.network_api.allocate_for_instance( [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] created_port_ids = self._update_ports_for_instance( [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] with excutils.save_and_reraise_exception(): [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] self.force_reraise() [ 731.495457] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise self.value [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] updated_port = self._update_port( [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] _ensure_no_port_binding_failure(port) [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 
9bda2d4c-38c0-49ba-9a69-402869ff6a65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] raise exception.PortBindingFailed(port_id=port['id']) [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] nova.exception.PortBindingFailed: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. [ 731.496150] env[62503]: ERROR nova.compute.manager [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] [ 731.496150] env[62503]: DEBUG nova.compute.utils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 731.496713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.919s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.499932] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Build of instance 9bda2d4c-38c0-49ba-9a69-402869ff6a65 was re-scheduled: Binding failed for port 81178767-32f1-432a-b245-c632c7753243, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 731.500386] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 731.501030] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquiring lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.501030] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Acquired lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.501030] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.765712] env[62503]: INFO nova.compute.manager [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] [instance: d4930731-7333-426c-a2fc-a732d351a0f0] Took 1.02 seconds to deallocate network for instance. [ 731.926402] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 732.041562] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.190737] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.343700] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaed0804-032d-4d78-b5bc-58b7ed619184 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.351738] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cd3c93-6dac-406c-8703-d2b1f6ad33a8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.383464] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26ed7d0-0bda-4bf5-908c-56bf5e811764 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.390968] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a3f740-10c4-47d8-9514-555d9e688e17 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.403930] env[62503]: DEBUG nova.compute.provider_tree [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.444319] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.698038] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Releasing lock "refresh_cache-9bda2d4c-38c0-49ba-9a69-402869ff6a65" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.698038] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 732.698038] env[62503]: DEBUG nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 732.698038] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.719018] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.817191] env[62503]: INFO nova.scheduler.client.report [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Deleted allocations for instance d4930731-7333-426c-a2fc-a732d351a0f0 [ 732.907173] env[62503]: DEBUG nova.scheduler.client.report [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 733.222523] env[62503]: DEBUG nova.network.neutron [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.331273] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e421dd54-c082-45de-8156-0a223999bd64 tempest-ServerRescueTestJSON-89698622 tempest-ServerRescueTestJSON-89698622-project-member] Lock "d4930731-7333-426c-a2fc-a732d351a0f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.006s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.412424] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.413218] 
env[62503]: ERROR nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Traceback (most recent call last): [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.driver.spawn(context, instance, image_meta, [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] vm_ref = self.build_virtual_machine(instance, [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.413218] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] for vif in network_info: [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return self._sync_wrapper(fn, *args, **kwargs) [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.wait() [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self[:] = self._gt.wait() [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return self._exit_event.wait() [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] current.throw(*self._exc) [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.413862] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] result = function(*args, **kwargs) [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] return func(*args, **kwargs) [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise e [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] nwinfo = self.network_api.allocate_for_instance( [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] created_port_ids = self._update_ports_for_instance( [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] with excutils.save_and_reraise_exception(): [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] self.force_reraise() [ 733.414515] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise self.value [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] updated_port = self._update_port( [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] _ensure_no_port_binding_failure(port) [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: 
ce8d9b01-e99d-4051-bd96-659692a436da] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] raise exception.PortBindingFailed(port_id=port['id']) [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] nova.exception.PortBindingFailed: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. [ 733.415162] env[62503]: ERROR nova.compute.manager [instance: ce8d9b01-e99d-4051-bd96-659692a436da] [ 733.415162] env[62503]: DEBUG nova.compute.utils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.415702] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.413s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.420658] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Build of instance ce8d9b01-e99d-4051-bd96-659692a436da was re-scheduled: Binding failed for port 80bd40c8-505a-4b91-a56d-00f1135ef5cf, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 733.420658] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 733.420658] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquiring lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.420658] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Acquired lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.420892] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.727926] env[62503]: INFO nova.compute.manager [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] [instance: 9bda2d4c-38c0-49ba-9a69-402869ff6a65] Took 1.03 seconds to deallocate network for instance. [ 733.840366] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 733.951378] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.080539] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.268228] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e225d788-0ddc-492d-8295-e06c2a4ce12d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.277075] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95592fd-780a-4b88-ae78-73da4ae4b531 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.313160] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798bd6bf-45a8-4194-8d1f-fe2df669247c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.320662] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530e78d9-2647-4877-9580-7d17905ced62 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.334033] env[62503]: DEBUG nova.compute.provider_tree [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.362079] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.583857] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Releasing lock "refresh_cache-ce8d9b01-e99d-4051-bd96-659692a436da" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.584127] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 734.584316] env[62503]: DEBUG nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 734.584485] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.614055] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.772612] env[62503]: INFO nova.scheduler.client.report [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Deleted allocations for instance 9bda2d4c-38c0-49ba-9a69-402869ff6a65 [ 734.840243] env[62503]: DEBUG nova.scheduler.client.report [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 735.116853] env[62503]: DEBUG nova.network.neutron [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.286605] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e326ade-9b42-48b9-9ec9-471f7156ba7e tempest-AttachInterfacesV270Test-1411655473 tempest-AttachInterfacesV270Test-1411655473-project-member] Lock "9bda2d4c-38c0-49ba-9a69-402869ff6a65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.694s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.347213] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.347213] env[62503]: ERROR nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] Traceback (most recent call last): [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.driver.spawn(context, instance, image_meta, [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 735.347213] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] vm_ref = self.build_virtual_machine(instance, [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] vif_infos = vmwarevif.get_vif_info(self._session, [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] for vif in network_info: [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return self._sync_wrapper(fn, *args, **kwargs) [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.wait() [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self[:] = self._gt.wait() [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return self._exit_event.wait() [ 
735.347517] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] current.throw(*self._exc) [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] result = function(*args, **kwargs) [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] return func(*args, **kwargs) [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise e [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] nwinfo = self.network_api.allocate_for_instance( [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] created_port_ids = self._update_ports_for_instance( [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 735.347867] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] with excutils.save_and_reraise_exception(): [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] self.force_reraise() [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise self.value [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] updated_port = self._update_port( [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] 
_ensure_no_port_binding_failure(port) [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] raise exception.PortBindingFailed(port_id=port['id']) [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] nova.exception.PortBindingFailed: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. [ 735.348232] env[62503]: ERROR nova.compute.manager [instance: 3178d5cd-1937-422b-9287-970d095aa452] [ 735.348607] env[62503]: DEBUG nova.compute.utils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 735.348736] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.601s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.353278] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Build of instance 3178d5cd-1937-422b-9287-970d095aa452 was re-scheduled: Binding failed for port 75a6eeb4-800f-4551-bad8-cad36b81a1c2, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 735.353633] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 735.353880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.354037] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquired lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.354197] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.619370] env[62503]: INFO nova.compute.manager [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] [instance: ce8d9b01-e99d-4051-bd96-659692a436da] Took 1.03 seconds to deallocate network for instance. [ 735.790214] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 735.897225] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.106189] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.167720] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bd8bfd-5589-4b25-b194-5eb1882dc0c4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.177017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d980af0-f017-48d4-9e1b-d4ea7943dc71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.208569] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf99622-fb61-4a1b-8b74-ed60e161ac36 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.216087] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa5724e-70ae-43d5-85e6-102162f03b6b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.232503] env[62503]: DEBUG nova.compute.provider_tree [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.309169] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.614669] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Releasing lock "refresh_cache-3178d5cd-1937-422b-9287-970d095aa452" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.614669] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 736.614669] env[62503]: DEBUG nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 736.614669] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.638999] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.649262] env[62503]: INFO nova.scheduler.client.report [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Deleted allocations for instance ce8d9b01-e99d-4051-bd96-659692a436da [ 736.736536] env[62503]: DEBUG nova.scheduler.client.report [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 737.141491] env[62503]: DEBUG nova.network.neutron [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.160506] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9a47e67e-9a92-496f-b283-39f380d35bee tempest-ServerAddressesTestJSON-1019310862 tempest-ServerAddressesTestJSON-1019310862-project-member] Lock "ce8d9b01-e99d-4051-bd96-659692a436da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.112s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.243154] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.894s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.244070] env[62503]: ERROR nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Traceback (most recent call last): [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.driver.spawn(context, instance, image_meta, [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] vm_ref = self.build_virtual_machine(instance, [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.244070] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] for vif in network_info: [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return self._sync_wrapper(fn, *args, **kwargs) [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.wait() [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self[:] = self._gt.wait() [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return self._exit_event.wait() [ 
737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] current.throw(*self._exc) [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.244591] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] result = function(*args, **kwargs) [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] return func(*args, **kwargs) [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise e [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] nwinfo = self.network_api.allocate_for_instance( [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] created_port_ids = self._update_ports_for_instance( [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] with excutils.save_and_reraise_exception(): [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] self.force_reraise() [ 737.247377] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise self.value [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] updated_port = self._update_port( [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] 
_ensure_no_port_binding_failure(port) [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] raise exception.PortBindingFailed(port_id=port['id']) [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] nova.exception.PortBindingFailed: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. [ 737.247730] env[62503]: ERROR nova.compute.manager [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] [ 737.247730] env[62503]: DEBUG nova.compute.utils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 737.248013] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.354s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.249775] env[62503]: INFO nova.compute.claims [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.255208] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Build of instance 97ac40d6-1c29-4282-86e5-be27a20cf5e0 was re-scheduled: Binding failed for port 96ba244c-f6e2-4f7d-b56a-dd29b0d7721b, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 737.257746] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 737.257746] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquiring lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.257746] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Acquired lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.257746] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.643424] env[62503]: INFO nova.compute.manager [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 3178d5cd-1937-422b-9287-970d095aa452] Took 1.03 seconds to deallocate network for instance. [ 737.666494] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 737.785157] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.959162] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.204794] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.463025] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Releasing lock "refresh_cache-97ac40d6-1c29-4282-86e5-be27a20cf5e0" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.463132] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 738.463304] env[62503]: DEBUG nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 738.463469] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.487039] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.542177] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c922c7-5e23-4190-ab17-b875f72d6b8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.549319] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5281ee2e-b1e9-40d6-9182-d747221a9de1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.584609] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdacae5-c107-4631-98b1-7b2a324fce3d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.592220] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538828ef-1242-466d-aeb8-876680fd99a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.606088] env[62503]: DEBUG nova.compute.provider_tree [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.681624] env[62503]: INFO nova.scheduler.client.report [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Deleted allocations for instance 3178d5cd-1937-422b-9287-970d095aa452 [ 738.842261] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquiring lock "4cb117e3-ff57-4e7f-bb2b-a12c988e362c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.842550] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "4cb117e3-ff57-4e7f-bb2b-a12c988e362c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.988546] env[62503]: DEBUG nova.network.neutron [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.112022] env[62503]: DEBUG nova.scheduler.client.report [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 739.196098] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b3d1021d-dab1-43cf-b5d3-632b4017ba90 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "3178d5cd-1937-422b-9287-970d095aa452" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.661s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.491645] env[62503]: INFO nova.compute.manager [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] [instance: 97ac40d6-1c29-4282-86e5-be27a20cf5e0] Took 1.03 seconds to deallocate network for instance. [ 739.616338] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.617870] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.092s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.700129] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 740.122832] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "9e6edc9e-af42-4ffe-801b-6e66349b7f5a" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.123137] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "9e6edc9e-af42-4ffe-801b-6e66349b7f5a" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.226677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.439743] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6481b0-256c-4fa7-b4a3-08c7478e9594 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.444473] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e05366f-74de-494e-b7f1-7462de52da9c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.475416] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1d5fb3-767c-4da0-8fbc-0002bfe4947b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.482131] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916b58f2-c6ae-40c0-9fe7-2b22e8c82bcb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.495319] env[62503]: DEBUG nova.compute.provider_tree [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.530751] env[62503]: INFO nova.scheduler.client.report [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Deleted allocations for instance 97ac40d6-1c29-4282-86e5-be27a20cf5e0 [ 740.630786] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "9e6edc9e-af42-4ffe-801b-6e66349b7f5a" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s 
{{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.631412] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 741.005097] env[62503]: DEBUG nova.scheduler.client.report [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 741.038768] env[62503]: DEBUG oslo_concurrency.lockutils [None req-53bd46c4-9392-486d-a978-bb7729f67092 tempest-ServerActionsV293TestJSON-923794683 tempest-ServerActionsV293TestJSON-923794683-project-member] Lock "97ac40d6-1c29-4282-86e5-be27a20cf5e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.324s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.138056] env[62503]: DEBUG nova.compute.utils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.138056] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 741.138056] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.231619] env[62503]: DEBUG nova.policy [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4bd1e2e74af4f009326e4157f943f0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '687d6ec6722d48219f6cbabe22069c24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.513885] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.892s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.513885] env[62503]: ERROR nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. 
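
For readers tracing the repeated PortBindingFailed tracebacks in this log: each one bottoms out in nova/network/neutron.py at _ensure_no_port_binding_failure(port), which raises nova.exception.PortBindingFailed when neutron reports that it could not bind the port. The following is a minimal standalone sketch of that kind of check, not nova's actual implementation; the 'binding:vif_type' key, the "binding_failed" sentinel value, and the simplified exception class are assumptions made for illustration only.

    # Hedged sketch of a neutron port-binding check, modelled loosely on the
    # helper named in the tracebacks above. Names marked here are illustrative
    # stand-ins, not nova's real classes.

    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    # Assumed sentinel: neutron marks a failed binding in 'binding:vif_type'.
    VIF_TYPE_BINDING_FAILED = "binding_failed"

    def ensure_no_port_binding_failure(port):
        """Raise if the port dict says neutron failed to bind the port."""
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    if __name__ == "__main__":
        # A port dict shaped like a (simplified) neutron API response, using a
        # port ID taken from the log above.
        bad_port = {"id": "80bd40c8-505a-4b91-a56d-00f1135ef5cf",
                    "binding:vif_type": VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the error message repeated throughout the log

In the log itself this check fires inside _update_port during allocate_for_instance, the exception propagates back through the async network-allocation greenthread, and the compute manager then aborts the claim and re-schedules the build, which is why each traceback is followed by "was re-scheduled" and "Deallocating network for instance" entries.
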
[ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Traceback (most recent call last): [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.driver.spawn(context, instance, image_meta, [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.513885] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] vm_ref = self.build_virtual_machine(instance, [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] for vif in network_info: [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return self._sync_wrapper(fn, *args, **kwargs) [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.wait() [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self[:] = self._gt.wait() [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return self._exit_event.wait() [ 741.514310] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] current.throw(*self._exc) [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] result = function(*args, **kwargs) [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] return func(*args, **kwargs) [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise e [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] nwinfo = self.network_api.allocate_for_instance( [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] created_port_ids = self._update_ports_for_instance( [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.514678] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] with excutils.save_and_reraise_exception(): [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] self.force_reraise() [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise self.value [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] updated_port = self._update_port( [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] _ensure_no_port_binding_failure(port) [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] raise exception.PortBindingFailed(port_id=port['id']) [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] nova.exception.PortBindingFailed: Binding failed for 
port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. [ 741.515051] env[62503]: ERROR nova.compute.manager [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] [ 741.515389] env[62503]: DEBUG nova.compute.utils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 741.515389] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.307s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.516490] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Build of instance 0b40f385-db0a-460c-b7fd-47e4d6afbaf9 was re-scheduled: Binding failed for port b1a727a8-b3e3-44dd-8a7a-6fd89dcfa153, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 741.516931] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 741.518707] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquiring lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.518917] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Acquired lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.519138] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.542123] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 741.642403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "09688e22-9225-4619-a9aa-eddb332cb8ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.642612] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "09688e22-9225-4619-a9aa-eddb332cb8ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.643372] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 741.732031] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Successfully created port: 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.064515] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.078974] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.215392] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.391163] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72867236-879b-417b-b584-7b2bf963338c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.398699] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8d48ad-9e19-4d43-8cdb-820ea024bb4b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.435030] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05eb6e07-89e3-4959-b196-2581d63cfa82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.443603] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7876378d-a1b6-4aeb-88fc-29787cb27263 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.456923] env[62503]: DEBUG nova.compute.provider_tree [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.659338] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 742.689447] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.689730] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.689881] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.690251] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.690488] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.690990] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.690990] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.691186] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.691231] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd 
tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.691553] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.691553] env[62503]: DEBUG nova.virt.hardware [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.692443] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce12c32-b5b8-46cb-93d2-94223af564ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.701170] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff76058-50cd-4492-894b-f785e0e9f891 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.721356] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Releasing lock "refresh_cache-0b40f385-db0a-460c-b7fd-47e4d6afbaf9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.722654] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 742.722972] env[62503]: DEBUG nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 742.723713] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.746860] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.965063] env[62503]: DEBUG nova.scheduler.client.report [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 743.250567] env[62503]: DEBUG nova.network.neutron [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.344397] env[62503]: DEBUG nova.compute.manager [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Received event network-changed-8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 743.344397] env[62503]: DEBUG nova.compute.manager [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Refreshing instance network info cache due to event network-changed-8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 743.344605] env[62503]: DEBUG oslo_concurrency.lockutils [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] Acquiring lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.344914] env[62503]: DEBUG oslo_concurrency.lockutils [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] Acquired lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.345664] env[62503]: DEBUG nova.network.neutron [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Refreshing network info cache for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.473911] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.473911] env[62503]: ERROR nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. 
[ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Traceback (most recent call last): [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.driver.spawn(context, instance, image_meta, [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.473911] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] vm_ref = self.build_virtual_machine(instance, [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] for vif in network_info: [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return self._sync_wrapper(fn, *args, **kwargs) [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.wait() [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self[:] = self._gt.wait() [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return self._exit_event.wait() [ 743.474258] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] current.throw(*self._exc) [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] result = function(*args, **kwargs) [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] return func(*args, **kwargs) [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise e [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] nwinfo = self.network_api.allocate_for_instance( [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] created_port_ids = self._update_ports_for_instance( [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.474692] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] with excutils.save_and_reraise_exception(): [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] self.force_reraise() [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise self.value [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] updated_port = self._update_port( [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] _ensure_no_port_binding_failure(port) [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] raise exception.PortBindingFailed(port_id=port['id']) [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] nova.exception.PortBindingFailed: Binding failed for 
port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. [ 743.475068] env[62503]: ERROR nova.compute.manager [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] [ 743.475415] env[62503]: DEBUG nova.compute.utils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 743.476477] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.910s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.477653] env[62503]: INFO nova.compute.claims [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.484433] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Build of instance c1693c1f-6497-429c-a7f7-5bf5591684d1 was re-scheduled: Binding failed for port 4e5bf34c-de80-4cd1-9d3f-8a7735e994a3, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 743.484433] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 743.484433] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.484433] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.484862] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 743.577803] env[62503]: ERROR nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. 
[ 743.577803] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 743.577803] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.577803] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.577803] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.577803] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.577803] env[62503]: ERROR nova.compute.manager raise self.value [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.577803] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 743.577803] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.577803] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 743.578296] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.578296] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 743.578296] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. 
[ 743.578296] env[62503]: ERROR nova.compute.manager [ 743.578296] env[62503]: Traceback (most recent call last): [ 743.578296] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 743.578296] env[62503]: listener.cb(fileno) [ 743.578296] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.578296] env[62503]: result = function(*args, **kwargs) [ 743.578296] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.578296] env[62503]: return func(*args, **kwargs) [ 743.578296] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 743.578296] env[62503]: raise e [ 743.578296] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 743.578296] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 743.578296] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.578296] env[62503]: created_port_ids = self._update_ports_for_instance( [ 743.578296] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.578296] env[62503]: with excutils.save_and_reraise_exception(): [ 743.578296] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.578296] env[62503]: self.force_reraise() [ 743.578296] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.578296] env[62503]: raise self.value [ 743.578296] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.578296] env[62503]: updated_port = self._update_port( [ 743.578296] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.578296] env[62503]: _ensure_no_port_binding_failure(port) [ 743.578296] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.578296] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 743.579134] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. [ 743.579134] env[62503]: Removing descriptor: 16 [ 743.579134] env[62503]: ERROR nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. 
[ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Traceback (most recent call last): [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] yield resources [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.driver.spawn(context, instance, image_meta, [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.579134] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] vm_ref = self.build_virtual_machine(instance, [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] for vif in network_info: [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self._sync_wrapper(fn, *args, **kwargs) [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.wait() [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self[:] = self._gt.wait() [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self._exit_event.wait() [ 743.579479] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 743.579934] env[62503]: ERROR 
nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] result = hub.switch() [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self.greenlet.switch() [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] result = function(*args, **kwargs) [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return func(*args, **kwargs) [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise e [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] nwinfo = self.network_api.allocate_for_instance( [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.579934] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] created_port_ids = self._update_ports_for_instance( [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] with excutils.save_and_reraise_exception(): [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.force_reraise() [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise self.value [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] updated_port = self._update_port( [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.580341] 
env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] _ensure_no_port_binding_failure(port) [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.580341] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise exception.PortBindingFailed(port_id=port['id']) [ 743.580689] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. [ 743.580689] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] [ 743.580689] env[62503]: INFO nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Terminating instance [ 743.582149] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.756019] env[62503]: INFO nova.compute.manager [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] [instance: 0b40f385-db0a-460c-b7fd-47e4d6afbaf9] Took 1.03 seconds to deallocate network for instance. [ 743.875166] env[62503]: DEBUG nova.network.neutron [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.983369] env[62503]: DEBUG nova.network.neutron [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.015906] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.120136] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.488357] env[62503]: DEBUG oslo_concurrency.lockutils [req-104e0038-81ab-400b-b319-3233b943b779 req-c1a068ba-3dfd-4918-8011-178a62c9f277 service nova] Releasing lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.488728] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquired lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.488933] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.559798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquiring lock "b1fc7438-2078-435a-9754-19a8a1bc6f5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.560037] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "b1fc7438-2078-435a-9754-19a8a1bc6f5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.623147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c1693c1f-6497-429c-a7f7-5bf5591684d1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.623147] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 744.623147] env[62503]: DEBUG nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 744.623147] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.641129] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.785799] env[62503]: INFO nova.scheduler.client.report [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Deleted allocations for instance 0b40f385-db0a-460c-b7fd-47e4d6afbaf9 [ 744.792299] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0e624d-04d1-4ae4-85b3-505caf4ae6ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.800100] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4354af3-dc9b-431a-ae70-767d0ddec15a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.831733] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078c3764-80bc-4306-8e1b-0e0b3dec5d1f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.839403] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b20236-43ed-4c6f-bc11-0997e537fd55 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.852923] env[62503]: DEBUG nova.compute.provider_tree [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.013623] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.104924] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.143610] env[62503]: DEBUG nova.network.neutron [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.298023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ef39e50f-5ce8-4286-9c89-acf2f5f9a4d4 tempest-VolumesAdminNegativeTest-585860034 tempest-VolumesAdminNegativeTest-585860034-project-member] Lock "0b40f385-db0a-460c-b7fd-47e4d6afbaf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.131s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.355457] env[62503]: DEBUG nova.scheduler.client.report [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 745.371185] env[62503]: DEBUG nova.compute.manager [req-c89666ce-e27f-4252-a756-c4085223515a req-59b5980b-422b-4a7c-ae0d-4ee35c2e9392 service nova] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Received event network-vif-deleted-8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 745.607315] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Releasing lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.607777] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 745.608017] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.608541] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4def9acd-af20-4aaf-bde2-4c3b6b2e2675 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.618274] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f67ea1-60ab-4765-9db3-aca3cf2c406c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.639567] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be79632e-78ca-440a-92ef-d86a9f32693e could not be found. [ 745.639755] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 745.639837] env[62503]: INFO nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 745.640091] env[62503]: DEBUG oslo.service.loopingcall [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.640306] env[62503]: DEBUG nova.compute.manager [-] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 745.640402] env[62503]: DEBUG nova.network.neutron [-] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.646560] env[62503]: INFO nova.compute.manager [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c1693c1f-6497-429c-a7f7-5bf5591684d1] Took 1.02 seconds to deallocate network for instance. [ 745.664091] env[62503]: DEBUG nova.network.neutron [-] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.771054] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.771303] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.800318] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 745.862144] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.862144] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 745.863964] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.420s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.867982] env[62503]: INFO nova.compute.claims [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.165236] env[62503]: DEBUG nova.network.neutron [-] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.276687] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.276872] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 746.276989] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 746.328073] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.371018] env[62503]: DEBUG nova.compute.utils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.373049] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 746.373342] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.448858] env[62503]: DEBUG nova.policy [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.668147] env[62503]: INFO nova.compute.manager [-] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Took 1.03 seconds to deallocate network for instance. [ 746.672783] env[62503]: DEBUG nova.compute.claims [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 746.672988] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.676750] env[62503]: INFO nova.scheduler.client.report [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocations for instance c1693c1f-6497-429c-a7f7-5bf5591684d1 [ 746.780159] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 746.780319] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 746.780449] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 746.780575] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Didn't find any instances for network info cache update. 
{{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10310}} [ 746.780775] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781136] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781217] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781367] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781524] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781670] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.781799] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 746.781940] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.877992] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 746.910706] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Successfully created port: ba51a328-d1e0-4749-a304-d9a9c54c44d8 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.190123] env[62503]: DEBUG oslo_concurrency.lockutils [None req-98b7d7bf-9e47-4fe2-b967-b3c7085ad444 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c1693c1f-6497-429c-a7f7-5bf5591684d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.830s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.202405] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc1baa7-9964-4807-bdae-3f79cde2cb69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.213613] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035ed2e6-cb41-4c2a-99bb-d2dbceb8083f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.249890] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12373379-1711-47db-abbb-e37db5f3d4ed {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.257639] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e792b1c5-f0c4-4cdd-b52b-da5c52ff3ca7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.272076] env[62503]: DEBUG nova.compute.provider_tree [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.285327] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.693191] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 747.778683] env[62503]: DEBUG nova.scheduler.client.report [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 747.890822] env[62503]: DEBUG nova.compute.manager [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Received event network-changed-ba51a328-d1e0-4749-a304-d9a9c54c44d8 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 747.890822] env[62503]: DEBUG nova.compute.manager [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Refreshing instance network info cache due to event network-changed-ba51a328-d1e0-4749-a304-d9a9c54c44d8. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 747.890822] env[62503]: DEBUG oslo_concurrency.lockutils [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] Acquiring lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.890822] env[62503]: DEBUG oslo_concurrency.lockutils [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] Acquired lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.891216] env[62503]: DEBUG nova.network.neutron [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Refreshing network info cache for port ba51a328-d1e0-4749-a304-d9a9c54c44d8 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.893076] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 747.922809] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.923237] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.923237] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.924093] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.924093] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.924093] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.924093] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.924093] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.924360] env[62503]: DEBUG nova.virt.hardware [None 
req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.924360] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.924515] env[62503]: DEBUG nova.virt.hardware [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.925986] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3b81e1-9094-4927-959a-c0c6a90c5c40 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.934727] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63afb749-7d21-4835-8596-fcdf55351ae8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.063022] env[62503]: ERROR nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. 
[ 748.063022] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 748.063022] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 748.063022] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 748.063022] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.063022] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.063022] env[62503]: ERROR nova.compute.manager raise self.value [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 748.063022] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 748.063022] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.063022] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 748.066839] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.066839] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 748.066839] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. 
[ 748.066839] env[62503]: ERROR nova.compute.manager [ 748.066839] env[62503]: Traceback (most recent call last): [ 748.066839] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 748.066839] env[62503]: listener.cb(fileno) [ 748.066839] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.066839] env[62503]: result = function(*args, **kwargs) [ 748.066839] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 748.066839] env[62503]: return func(*args, **kwargs) [ 748.066839] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 748.066839] env[62503]: raise e [ 748.066839] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 748.066839] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 748.066839] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 748.066839] env[62503]: created_port_ids = self._update_ports_for_instance( [ 748.066839] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 748.066839] env[62503]: with excutils.save_and_reraise_exception(): [ 748.066839] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.066839] env[62503]: self.force_reraise() [ 748.066839] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.066839] env[62503]: raise self.value [ 748.066839] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 748.066839] env[62503]: updated_port = self._update_port( [ 748.066839] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.066839] env[62503]: _ensure_no_port_binding_failure(port) [ 748.066839] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.066839] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 748.068379] env[62503]: nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. [ 748.068379] env[62503]: Removing descriptor: 16 [ 748.068379] env[62503]: ERROR nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. 
[ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Traceback (most recent call last): [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] yield resources [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.driver.spawn(context, instance, image_meta, [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.068379] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] vm_ref = self.build_virtual_machine(instance, [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] for vif in network_info: [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self._sync_wrapper(fn, *args, **kwargs) [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.wait() [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self[:] = self._gt.wait() [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self._exit_event.wait() [ 748.069524] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.070549] env[62503]: ERROR 
nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] result = hub.switch() [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self.greenlet.switch() [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] result = function(*args, **kwargs) [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return func(*args, **kwargs) [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise e [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] nwinfo = self.network_api.allocate_for_instance( [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 748.070549] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] created_port_ids = self._update_ports_for_instance( [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] with excutils.save_and_reraise_exception(): [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.force_reraise() [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise self.value [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] updated_port = self._update_port( [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.071461] 
env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] _ensure_no_port_binding_failure(port) [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.071461] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise exception.PortBindingFailed(port_id=port['id']) [ 748.072270] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. [ 748.072270] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] [ 748.072270] env[62503]: INFO nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Terminating instance [ 748.072270] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.221626] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.284863] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.285350] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 748.291070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.929s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.292416] env[62503]: INFO nova.compute.claims [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.426150] env[62503]: DEBUG nova.network.neutron [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.564657] env[62503]: DEBUG nova.network.neutron [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.797959] env[62503]: DEBUG nova.compute.utils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.802424] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Not allocating networking since 'none' was specified. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 749.036022] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "4deb28e7-351b-41b7-90bb-afdde200f7fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.036022] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "4deb28e7-351b-41b7-90bb-afdde200f7fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.067457] env[62503]: DEBUG oslo_concurrency.lockutils [req-f5050c24-38e6-412c-8414-bc189a6cc57d req-990e60e9-5a6f-4e7f-bee6-3e465a54ed50 service nova] Releasing lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.068173] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.068363] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.307121] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 749.587700] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea8e69e-7873-4b82-aba8-12e2a780220c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.594074] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab62e6b1-d804-477e-9c68-a75cb219ce77 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.599231] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.626248] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50e97e9-0be4-4bcd-bdd1-10640994a5fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.634030] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254e3219-ec3b-4d44-8248-1230680be1e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.647408] env[62503]: DEBUG nova.compute.provider_tree [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.734214] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.919150] env[62503]: DEBUG nova.compute.manager [req-63919b28-855f-4fd7-a8ba-3b2f299193eb req-ebbc73ae-c1ce-4f0d-9c07-90d01028880f service nova] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Received event network-vif-deleted-ba51a328-d1e0-4749-a304-d9a9c54c44d8 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 750.150983] env[62503]: DEBUG nova.scheduler.client.report [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 750.237628] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.237628] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 750.237628] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.237945] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abe33298-75aa-4340-98ec-a682f5e9f5c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.247140] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3717108b-896e-4567-bc09-bf8946327936 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.269815] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47d67edd-0860-49a6-ab7e-0511cffb82ae could not be found. [ 750.270099] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.270286] env[62503]: INFO nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Took 0.03 seconds to destroy the instance on the hypervisor. [ 750.270539] env[62503]: DEBUG oslo.service.loopingcall [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.270793] env[62503]: DEBUG nova.compute.manager [-] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 750.270873] env[62503]: DEBUG nova.network.neutron [-] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.301966] env[62503]: DEBUG nova.network.neutron [-] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.317893] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 750.365929] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 750.366282] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 750.366451] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.366672] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 750.366868] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.367061] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 750.367313] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 750.367508] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 750.367727] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 
tempest-ServerShowV247Test-342787266-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 750.367906] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 750.368091] env[62503]: DEBUG nova.virt.hardware [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 750.369074] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6b44b9-c8f2-4f9c-8b16-f6921ba15b31 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.376957] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed146b20-81e8-484d-a9a6-8f45a465bd0f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.391735] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.397244] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Creating folder: Project (c669817ba9ff411983eabd4eaa7974f2). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.397527] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90c1fa01-2f15-44bd-9096-8a2e199bfe10 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.409018] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Created folder: Project (c669817ba9ff411983eabd4eaa7974f2) in parent group-v294540. [ 750.409018] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Creating folder: Instances. Parent ref: group-v294557. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.409018] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-845be24c-8587-43eb-a50b-3215bd55c717 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.416592] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Created folder: Instances in parent group-v294557. 
[ 750.416830] env[62503]: DEBUG oslo.service.loopingcall [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.417020] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.417215] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b2f093b-f7b1-4702-a752-2ddc026b55bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.435689] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.435689] env[62503]: value = "task-1387729" [ 750.435689] env[62503]: _type = "Task" [ 750.435689] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.445875] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387729, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.656337] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.656488] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 750.659217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.350s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.661459] env[62503]: INFO nova.compute.claims [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.805383] env[62503]: DEBUG nova.network.neutron [-] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.948667] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387729, 'name': CreateVM_Task, 'duration_secs': 0.228618} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.949082] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.949262] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.949422] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.949794] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 750.950053] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce6eeb08-34d3-4a46-9eaf-22d086434c6c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.954377] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 750.954377] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52158622-52dc-3013-e7a6-4c5f5196bdda" [ 750.954377] env[62503]: _type = "Task" [ 750.954377] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.963282] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52158622-52dc-3013-e7a6-4c5f5196bdda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.168213] env[62503]: DEBUG nova.compute.utils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 751.169762] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Not allocating networking since 'none' was specified. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 751.310776] env[62503]: INFO nova.compute.manager [-] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Took 1.04 seconds to deallocate network for instance. [ 751.310776] env[62503]: DEBUG nova.compute.claims [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 751.314224] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.464657] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52158622-52dc-3013-e7a6-4c5f5196bdda, 'name': SearchDatastore_Task, 'duration_secs': 0.012104} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.464944] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.465357] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.465447] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.465557] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.465726] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.465989] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-eb058e60-bda0-47ac-8d12-39df3134f5e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.474343] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.474560] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.475331] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d971cf4a-8cb3-4385-ae95-af0b5ba08ff6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.480624] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 751.480624] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fb3d18-4359-7f70-cd10-1a7ea84b46fa" [ 751.480624] env[62503]: _type = "Task" [ 751.480624] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.487893] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fb3d18-4359-7f70-cd10-1a7ea84b46fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.677335] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 751.988408] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfce563-1f8d-46ff-a514-540d303ce088 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.994700] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fb3d18-4359-7f70-cd10-1a7ea84b46fa, 'name': SearchDatastore_Task, 'duration_secs': 0.007458} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.995759] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f544938-e5df-4df7-b5d9-a0712dd1ae06 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.000414] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa8f6f6-d029-494c-92a4-01484ea8bfc6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.005036] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 752.005036] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524a62d5-e186-51cf-5b0e-e46dead6f328" [ 752.005036] env[62503]: _type = "Task" [ 752.005036] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.034929] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32365f51-c5d7-4115-b62c-6621fd921ff7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.040466] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524a62d5-e186-51cf-5b0e-e46dead6f328, 'name': SearchDatastore_Task, 'duration_secs': 0.008576} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.042370] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.042653] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 141d7d04-0267-4e15-90ed-112ac8fb8c9b/141d7d04-0267-4e15-90ed-112ac8fb8c9b.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.044727] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef3ee65a-c3d4-457d-b2b9-36b9f3d645a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.049463] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef5c0d2-876c-45f2-ad21-c945cfcf11e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.063562] env[62503]: DEBUG nova.compute.provider_tree [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.065972] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 752.065972] env[62503]: value = "task-1387730" [ 752.065972] env[62503]: _type = "Task" [ 752.065972] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.074232] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.567765] env[62503]: DEBUG nova.scheduler.client.report [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 752.579427] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496367} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.579548] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 141d7d04-0267-4e15-90ed-112ac8fb8c9b/141d7d04-0267-4e15-90ed-112ac8fb8c9b.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 752.580425] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 752.580425] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05c4f9cf-2895-4958-b25b-f17bada70ca4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.585639] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 752.585639] env[62503]: value = "task-1387731" [ 752.585639] env[62503]: _type = "Task" [ 752.585639] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.594053] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.689803] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 752.714480] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 752.714726] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 752.714882] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.715082] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 752.715227] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.715373] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 752.715581] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 752.715747] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 752.715903] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 
tempest-ServerShowV247Test-342787266-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 752.716077] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 752.716250] env[62503]: DEBUG nova.virt.hardware [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.717195] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3accde-7506-46c1-a119-e233a08893ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.725443] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60e8240-8946-46e1-b1f5-98f78174bd81 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.742158] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.751069] env[62503]: DEBUG oslo.service.loopingcall [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.751069] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.751069] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0f189c2-1f59-4c4d-ac7a-a2f2722a9a7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.766013] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.766013] env[62503]: value = "task-1387732" [ 752.766013] env[62503]: _type = "Task" [ 752.766013] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.772912] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387732, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.075789] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.077042] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 753.079104] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.875s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.080406] env[62503]: INFO nova.compute.claims [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.096319] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059871} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.097198] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.097471] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a5c716-5955-4104-96cd-8d7a8711cb2d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.119687] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 141d7d04-0267-4e15-90ed-112ac8fb8c9b/141d7d04-0267-4e15-90ed-112ac8fb8c9b.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.121349] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cfa26f0-4866-4999-a2ab-a7118d5fdfbb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.140077] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 753.140077] env[62503]: value = "task-1387733" [ 753.140077] env[62503]: _type = "Task" [ 753.140077] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.147902] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387733, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.278177] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387732, 'name': CreateVM_Task, 'duration_secs': 0.237867} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.278424] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 753.278981] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.279227] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.279699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 753.280166] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb58b566-b1d7-49a4-9098-711f936fcd00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.285231] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 753.285231] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a0823a-7a18-b950-4677-a811dd3f81bf" [ 753.285231] env[62503]: _type = "Task" [ 753.285231] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.292763] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a0823a-7a18-b950-4677-a811dd3f81bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.350612] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquiring lock "35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.350854] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.584857] env[62503]: DEBUG nova.compute.utils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 753.588539] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 753.588760] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 753.645728] env[62503]: DEBUG nova.policy [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd66a2185025a4450a5e4b7f47540a4aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7516ffd5f3cf4db38264d246e8bee8a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 753.652645] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387733, 'name': ReconfigVM_Task, 'duration_secs': 0.331442} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.652903] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 141d7d04-0267-4e15-90ed-112ac8fb8c9b/141d7d04-0267-4e15-90ed-112ac8fb8c9b.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.653487] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a37af6a1-8e2b-486a-a758-93040df5a26f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.658965] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 753.658965] env[62503]: value = "task-1387734" [ 753.658965] env[62503]: _type = "Task" [ 753.658965] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.666471] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387734, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.795578] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a0823a-7a18-b950-4677-a811dd3f81bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009275} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.795872] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.796112] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.796346] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.796527] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.796711] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.796965] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5199f4a-ccb1-4b4a-8635-c408211cd253 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.805289] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.805471] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.806158] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04acf7b2-8ec9-43ea-98b6-039b55e29fdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.810795] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 753.810795] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c991a3-8ac6-8dc1-6226-be7424c1ea30" [ 753.810795] env[62503]: _type = "Task" [ 753.810795] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.818410] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c991a3-8ac6-8dc1-6226-be7424c1ea30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.957517] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Successfully created port: 1f509a03-3771-46fe-a0f9-4aed7d32e203 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.090589] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 754.179464] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387734, 'name': Rename_Task, 'duration_secs': 0.126756} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.179792] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.180063] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbfce0cf-bbc4-48ac-b390-e4031db9c11f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.192296] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 754.192296] env[62503]: value = "task-1387735" [ 754.192296] env[62503]: _type = "Task" [ 754.192296] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.202583] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.327837] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c991a3-8ac6-8dc1-6226-be7424c1ea30, 'name': SearchDatastore_Task, 'duration_secs': 0.008035} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.327837] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffcb9ef8-9fac-4fdb-9e16-4e176cd494b3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.334490] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 754.334490] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c4dcc0-9bef-153d-4548-5c24ca905a4a" [ 754.334490] env[62503]: _type = "Task" [ 754.334490] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.342528] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c4dcc0-9bef-153d-4548-5c24ca905a4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.456888] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0bbe8f-74ea-49c7-a956-822c9eed7ede {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.464821] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bddb67-e76e-4f11-ae2a-70735472e851 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.494567] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904a6c42-c628-4fbf-a88a-0c3df90affc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.501062] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2356cbe6-f96c-4215-811b-474b3638a167 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.513611] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.703939] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387735, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.714373] env[62503]: DEBUG nova.compute.manager [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Received event network-changed-1f509a03-3771-46fe-a0f9-4aed7d32e203 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 754.714611] env[62503]: DEBUG nova.compute.manager [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Refreshing instance network info cache due to event network-changed-1f509a03-3771-46fe-a0f9-4aed7d32e203. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 754.714803] env[62503]: DEBUG oslo_concurrency.lockutils [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] Acquiring lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.714907] env[62503]: DEBUG oslo_concurrency.lockutils [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] Acquired lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.715078] env[62503]: DEBUG nova.network.neutron [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Refreshing network info cache for port 1f509a03-3771-46fe-a0f9-4aed7d32e203 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.845660] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c4dcc0-9bef-153d-4548-5c24ca905a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.010005} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.846038] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.846886] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.847170] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8a78dba-be78-4628-be30-55ea2da04d07 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.857558] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 754.857558] env[62503]: value = "task-1387736" [ 754.857558] env[62503]: _type = "Task" [ 754.857558] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.863794] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387736, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.957408] env[62503]: ERROR nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. [ 754.957408] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 754.957408] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.957408] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.957408] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.957408] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.957408] env[62503]: ERROR nova.compute.manager raise self.value [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.957408] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 754.957408] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.957408] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 754.958260] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.958260] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 754.958260] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. 
[ 754.958260] env[62503]: ERROR nova.compute.manager [ 754.958260] env[62503]: Traceback (most recent call last): [ 754.958260] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 754.958260] env[62503]: listener.cb(fileno) [ 754.958260] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 754.958260] env[62503]: result = function(*args, **kwargs) [ 754.958260] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 754.958260] env[62503]: return func(*args, **kwargs) [ 754.958260] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 754.958260] env[62503]: raise e [ 754.958260] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 754.958260] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 754.958260] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.958260] env[62503]: created_port_ids = self._update_ports_for_instance( [ 754.958260] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.958260] env[62503]: with excutils.save_and_reraise_exception(): [ 754.958260] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.958260] env[62503]: self.force_reraise() [ 754.958260] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.958260] env[62503]: raise self.value [ 754.958260] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.958260] env[62503]: updated_port = self._update_port( [ 754.958260] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.958260] env[62503]: _ensure_no_port_binding_failure(port) [ 754.958260] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.958260] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 754.959723] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. [ 754.959723] env[62503]: Removing descriptor: 16 [ 755.039266] env[62503]: ERROR nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [req-b98db91c-1eed-495e-8b55-27b524fb636d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b98db91c-1eed-495e-8b55-27b524fb636d"}]} [ 755.057807] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 755.073906] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 755.074180] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.088913] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 755.105413] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 755.111607] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 755.135467] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.135718] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.135876] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.136068] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.136213] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.136359] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.136568] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 
tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.136725] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.136896] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.137067] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.137254] env[62503]: DEBUG nova.virt.hardware [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.138559] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a1afa0-d71a-4da1-83db-80a76f29e8c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.150188] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d1a515-04e2-4477-8f6a-443daa9948b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.164788] env[62503]: ERROR nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. 
[ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Traceback (most recent call last): [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] yield resources [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.driver.spawn(context, instance, image_meta, [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self._vmops.spawn(context, instance, image_meta, injected_files, [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] vm_ref = self.build_virtual_machine(instance, [ 755.164788] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] vif_infos = vmwarevif.get_vif_info(self._session, [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] for vif in network_info: [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return self._sync_wrapper(fn, *args, **kwargs) [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.wait() [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self[:] = self._gt.wait() [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return self._exit_event.wait() [ 755.165261] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 755.165261] env[62503]: ERROR 
nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] current.throw(*self._exc) [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] result = function(*args, **kwargs) [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return func(*args, **kwargs) [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise e [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] nwinfo = self.network_api.allocate_for_instance( [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] created_port_ids = self._update_ports_for_instance( [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] with excutils.save_and_reraise_exception(): [ 755.165709] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.force_reraise() [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise self.value [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] updated_port = self._update_port( [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] _ensure_no_port_binding_failure(port) [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise exception.PortBindingFailed(port_id=port['id']) [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. [ 755.166155] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] [ 755.166155] env[62503]: INFO nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Terminating instance [ 755.169892] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquiring lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.207063] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387735, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.241383] env[62503]: DEBUG nova.network.neutron [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.340872] env[62503]: DEBUG nova.network.neutron [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.366911] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460365} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.366911] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.367222] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.367400] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73615ce6-0e80-406e-a6fd-3d58cbb0df69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.373913] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 755.373913] env[62503]: value = "task-1387737" [ 755.373913] env[62503]: _type = "Task" [ 755.373913] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.381429] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387737, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.416623] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4448d91-ddba-42bf-9906-32514d5567af {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.424277] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa75ced-c846-44b1-8577-373f86f969a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.454204] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c90d36-7afc-4a49-ad7b-1048d9b0827e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.461395] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df403e8f-ff14-49d9-8fc8-8b0c528962c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.474268] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.706953] env[62503]: DEBUG oslo_vmware.api [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387735, 'name': PowerOnVM_Task, 'duration_secs': 1.227918} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.707246] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.707447] env[62503]: INFO nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Took 5.39 seconds to spawn the instance on the hypervisor. 
[ 755.707624] env[62503]: DEBUG nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 755.708386] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bafae4-c4ae-4599-a7f4-881d60543907 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.843206] env[62503]: DEBUG oslo_concurrency.lockutils [req-89c39aae-cf6b-4017-9e8e-006b744d61d2 req-4a087bf8-a597-4477-8429-1267ebf0cccd service nova] Releasing lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.843631] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquired lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.843817] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.888483] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063024} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.888483] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.888483] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeab0b8-5400-4ea8-a687-43c4553ec729 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.911484] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.912547] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e67d3eff-1987-4e61-88ef-3659dc537328 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.936249] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 755.936249] env[62503]: value = "task-1387738" [ 755.936249] env[62503]: _type = "Task" [ 755.936249] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.946057] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387738, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.008527] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 756.008527] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 70 to 71 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 756.008527] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.224523] env[62503]: INFO nova.compute.manager [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Took 23.79 seconds to build instance. [ 756.362474] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.446143] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387738, 'name': ReconfigVM_Task, 'duration_secs': 0.273873} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.446452] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Reconfigured VM instance instance-0000002d to attach disk [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.447167] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1acf579-7bf4-4689-a9c6-2efe040dc885 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.449358] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.455174] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 756.455174] env[62503]: value = "task-1387739" [ 756.455174] env[62503]: _type = "Task" [ 756.455174] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.465134] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387739, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.512220] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.433s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.512729] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 756.515361] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.290s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.516735] env[62503]: INFO nova.compute.claims [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.727068] env[62503]: DEBUG oslo_concurrency.lockutils [None req-57a38f06-0431-4714-9351-3e81164f0fed tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.530s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.739298] env[62503]: DEBUG nova.compute.manager [req-38f9d273-28ed-4827-8dc8-4bbd04114124 req-060f6938-86fb-4f1f-bd4a-b22ffd9f0ea8 service nova] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Received event network-vif-deleted-1f509a03-3771-46fe-a0f9-4aed7d32e203 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 756.952248] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Releasing lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.952684] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 756.952882] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.953634] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0a43c14-6c8b-4fde-898c-1c01dc15aab0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.964560] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387739, 'name': Rename_Task, 'duration_secs': 0.132892} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.965641] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.965913] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7853d19-d2c2-4901-a57c-1f65ae75bc6a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.970059] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c5f70-3646-44e0-bfc4-21891d4c8d7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.984789] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 756.984789] env[62503]: value = "task-1387740" [ 756.984789] env[62503]: _type = "Task" [ 756.984789] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.992777] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 48d9b18d-04b5-44e4-809e-383819d39418 could not be found. [ 756.992873] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.992982] env[62503]: INFO nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Took 0.04 seconds to destroy the instance on the hypervisor. [ 756.993234] env[62503]: DEBUG oslo.service.loopingcall [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.993755] env[62503]: DEBUG nova.compute.manager [-] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 756.993854] env[62503]: DEBUG nova.network.neutron [-] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.999269] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.008561] env[62503]: DEBUG nova.network.neutron [-] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.023391] env[62503]: DEBUG nova.compute.utils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.025012] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 757.025187] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 757.067960] env[62503]: DEBUG nova.policy [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c41319365a5412b9bf7480a7edba4bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd69e3630f8144c288f8685c2201779ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 757.229560] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 757.425058] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Successfully created port: cb70e532-2d98-4792-8aa5-ace14414265b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.495570] env[62503]: DEBUG oslo_vmware.api [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387740, 'name': PowerOnVM_Task, 'duration_secs': 0.431255} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.495977] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.496309] env[62503]: INFO nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Took 4.81 seconds to spawn the instance on the hypervisor. [ 757.496511] env[62503]: DEBUG nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 757.497328] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5781bb8f-2e94-4c02-8123-0528e473f57e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.511343] env[62503]: DEBUG nova.network.neutron [-] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.525450] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 757.753251] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.848260] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1e2377-5b93-49d7-9749-540c0d3c0837 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.856246] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f59b7e-09f6-4135-a5bf-fae176d4cf41 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.886888] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92eb922-e09a-456f-8c03-3b8c560e8f00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.894837] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33e605a-6e7d-4a94-9aa6-61107166a851 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.909379] env[62503]: DEBUG nova.compute.provider_tree [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.013917] env[62503]: INFO nova.compute.manager [-] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Took 1.02 seconds to deallocate network for instance. [ 758.016864] env[62503]: INFO nova.compute.manager [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Took 23.67 seconds to build instance. 
[ 758.027809] env[62503]: DEBUG nova.compute.claims [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 758.028015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.414030] env[62503]: DEBUG nova.scheduler.client.report [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 758.435947] env[62503]: ERROR nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. 
[ 758.435947] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 758.435947] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.435947] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.435947] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.435947] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.435947] env[62503]: ERROR nova.compute.manager raise self.value [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.435947] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.435947] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.435947] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.436744] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.436744] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.436744] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. 
[ 758.436744] env[62503]: ERROR nova.compute.manager [ 758.436744] env[62503]: Traceback (most recent call last): [ 758.436744] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.436744] env[62503]: listener.cb(fileno) [ 758.436744] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.436744] env[62503]: result = function(*args, **kwargs) [ 758.436744] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.436744] env[62503]: return func(*args, **kwargs) [ 758.436744] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 758.436744] env[62503]: raise e [ 758.436744] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 758.436744] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 758.436744] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.436744] env[62503]: created_port_ids = self._update_ports_for_instance( [ 758.436744] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.436744] env[62503]: with excutils.save_and_reraise_exception(): [ 758.436744] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.436744] env[62503]: self.force_reraise() [ 758.436744] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.436744] env[62503]: raise self.value [ 758.436744] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.436744] env[62503]: updated_port = self._update_port( [ 758.436744] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.436744] env[62503]: _ensure_no_port_binding_failure(port) [ 758.436744] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.436744] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.437698] env[62503]: nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. [ 758.437698] env[62503]: Removing descriptor: 16 [ 758.520218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8c63ebde-167f-4614-8d90-7622283ae73c tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.435s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.537575] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 758.560726] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.560988] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.561197] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.561392] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.561539] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.561687] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.561894] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.562069] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.562238] 
env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.562401] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.562573] env[62503]: DEBUG nova.virt.hardware [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.563439] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b699a060-a726-4990-b3c6-91629ac37d39 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.571549] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be900af0-27e6-4869-9fdb-5d23f248591f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.585194] env[62503]: ERROR nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. 
[ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] Traceback (most recent call last): [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] yield resources [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.driver.spawn(context, instance, image_meta, [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] vm_ref = self.build_virtual_machine(instance, [ 758.585194] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] for vif in network_info: [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return self._sync_wrapper(fn, *args, **kwargs) [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.wait() [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self[:] = self._gt.wait() [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return self._exit_event.wait() [ 758.585600] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 758.585600] env[62503]: ERROR 
nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] current.throw(*self._exc) [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] result = function(*args, **kwargs) [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return func(*args, **kwargs) [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise e [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] nwinfo = self.network_api.allocate_for_instance( [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] created_port_ids = self._update_ports_for_instance( [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] with excutils.save_and_reraise_exception(): [ 758.586015] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.force_reraise() [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise self.value [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] updated_port = self._update_port( [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] _ensure_no_port_binding_failure(port) [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise exception.PortBindingFailed(port_id=port['id']) [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. [ 758.586409] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] [ 758.586409] env[62503]: INFO nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Terminating instance [ 758.590863] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.591061] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.591245] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.825401] env[62503]: DEBUG nova.compute.manager [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Received event network-changed-cb70e532-2d98-4792-8aa5-ace14414265b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 758.825401] env[62503]: DEBUG nova.compute.manager [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Refreshing instance network info cache due to event network-changed-cb70e532-2d98-4792-8aa5-ace14414265b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 758.825401] env[62503]: DEBUG oslo_concurrency.lockutils [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] Acquiring lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.882038] env[62503]: INFO nova.compute.manager [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Rebuilding instance [ 758.922495] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.922943] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 758.925471] env[62503]: DEBUG nova.compute.manager [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 758.925775] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.847s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.927172] env[62503]: INFO nova.compute.claims [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.930011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27c4469-99e1-41d2-9665-21bbd3e632d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.022539] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 759.112087] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.219812] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.436712] env[62503]: DEBUG nova.compute.utils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.439218] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 759.439407] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.492693] env[62503]: DEBUG nova.policy [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f39b023f4a47425ab8c32b193807bba3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5a881a218c04537af9d4f4a0eb30de3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 759.540922] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.722632] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.723060] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 759.723261] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.723570] env[62503]: DEBUG oslo_concurrency.lockutils [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] Acquired lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.723740] env[62503]: DEBUG nova.network.neutron [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Refreshing network info cache for port cb70e532-2d98-4792-8aa5-ace14414265b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.724800] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a43cb144-fbce-40ac-bb71-ac49479e1f93 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.734260] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb6f622-d2b7-4598-ad68-43172a61805a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.758750] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b9259ced-344a-42e5-835d-3713631a68c7 could not be found. [ 759.758980] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.759176] env[62503]: INFO nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 759.759413] env[62503]: DEBUG oslo.service.loopingcall [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.759633] env[62503]: DEBUG nova.compute.manager [-] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 759.759863] env[62503]: DEBUG nova.network.neutron [-] [instance: b9259ced-344a-42e5-835d-3713631a68c7] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.764324] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Successfully created port: 24c577a7-e25f-4538-a47d-16b593401630 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.776530] env[62503]: DEBUG nova.network.neutron [-] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.943488] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 759.947344] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.948254] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7015523-6160-4152-be3f-ff4392a09c17 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.958105] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 759.958105] env[62503]: value = "task-1387741" [ 759.958105] env[62503]: _type = "Task" [ 759.958105] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.969126] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387741, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.249559] env[62503]: DEBUG nova.network.neutron [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.278601] env[62503]: DEBUG nova.network.neutron [-] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.300543] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10d63d0-83f9-4eda-a8d3-b1bc1f5dece6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.308837] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a640295-6da3-4e37-9ba1-91dc84160ac7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.344191] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76bb302-bd22-460e-b516-c9b4a91f0dac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.349233] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57a6669-41de-4620-8f9b-4e0a3be6661a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.364890] env[62503]: DEBUG nova.compute.provider_tree [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.381651] env[62503]: DEBUG nova.network.neutron [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.467490] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387741, 'name': PowerOffVM_Task, 'duration_secs': 0.181381} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.467490] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.467973] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.468491] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a287e8-daa8-4142-accb-4d8eaca75e0d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.475076] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.475294] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-582f4f7c-7d47-4cf1-9469-7f40e9d6000c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.498752] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.498993] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.499184] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleting the datastore file [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.499426] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-241687cc-6a95-4c78-9070-421129af7995 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.506104] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 760.506104] env[62503]: value = "task-1387743" [ 760.506104] env[62503]: _type = "Task" [ 760.506104] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.513453] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.789665] env[62503]: ERROR nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. [ 760.789665] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 760.789665] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 760.789665] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 760.789665] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.789665] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.789665] env[62503]: ERROR nova.compute.manager raise self.value [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 760.789665] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 760.789665] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.789665] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 760.790407] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 760.790407] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 760.790407] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. 
[ 760.790407] env[62503]: ERROR nova.compute.manager [ 760.790407] env[62503]: Traceback (most recent call last): [ 760.790407] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 760.790407] env[62503]: listener.cb(fileno) [ 760.790407] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 760.790407] env[62503]: result = function(*args, **kwargs) [ 760.790407] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 760.790407] env[62503]: return func(*args, **kwargs) [ 760.790407] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 760.790407] env[62503]: raise e [ 760.790407] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 760.790407] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 760.790407] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 760.790407] env[62503]: created_port_ids = self._update_ports_for_instance( [ 760.790407] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 760.790407] env[62503]: with excutils.save_and_reraise_exception(): [ 760.790407] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.790407] env[62503]: self.force_reraise() [ 760.790407] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.790407] env[62503]: raise self.value [ 760.790407] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 760.790407] env[62503]: updated_port = self._update_port( [ 760.790407] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.790407] env[62503]: _ensure_no_port_binding_failure(port) [ 760.790407] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 760.790407] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 760.791800] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. [ 760.791800] env[62503]: Removing descriptor: 16 [ 760.791800] env[62503]: INFO nova.compute.manager [-] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Took 1.03 seconds to deallocate network for instance. 
[ 760.792518] env[62503]: DEBUG nova.compute.claims [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.792699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.851019] env[62503]: DEBUG nova.compute.manager [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Received event network-changed-24c577a7-e25f-4538-a47d-16b593401630 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 760.851977] env[62503]: DEBUG nova.compute.manager [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Refreshing instance network info cache due to event network-changed-24c577a7-e25f-4538-a47d-16b593401630. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 760.852245] env[62503]: DEBUG oslo_concurrency.lockutils [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] Acquiring lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.852405] env[62503]: DEBUG oslo_concurrency.lockutils [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] Acquired lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.852588] env[62503]: DEBUG nova.network.neutron [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Refreshing network info cache for port 24c577a7-e25f-4538-a47d-16b593401630 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.866823] env[62503]: DEBUG nova.scheduler.client.report [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 760.883702] env[62503]: DEBUG oslo_concurrency.lockutils [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] Releasing lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.884009] env[62503]: DEBUG nova.compute.manager [req-32f4cb43-9113-487c-a067-058cfa750949 req-8bab0a6b-2cec-48ee-9207-f2f05161b8be service nova] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Received event network-vif-deleted-cb70e532-2d98-4792-8aa5-ace14414265b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 760.953257] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 760.979021] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.979021] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.979021] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.979301] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.979301] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.979301] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.979611] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.979937] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.980287] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.980617] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.981595] env[62503]: DEBUG nova.virt.hardware [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.981909] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad578206-253b-46b0-9e02-acafb8d66362 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.990627] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4da8105-7e75-4091-89e8-f94127598858 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.004437] env[62503]: ERROR nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. 
[ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Traceback (most recent call last): [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] yield resources [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.driver.spawn(context, instance, image_meta, [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] vm_ref = self.build_virtual_machine(instance, [ 761.004437] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] for vif in network_info: [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return self._sync_wrapper(fn, *args, **kwargs) [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.wait() [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self[:] = self._gt.wait() [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return self._exit_event.wait() [ 761.004863] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.004863] env[62503]: ERROR 
nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] current.throw(*self._exc) [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] result = function(*args, **kwargs) [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return func(*args, **kwargs) [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise e [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] nwinfo = self.network_api.allocate_for_instance( [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] created_port_ids = self._update_ports_for_instance( [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] with excutils.save_and_reraise_exception(): [ 761.005304] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.force_reraise() [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise self.value [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] updated_port = self._update_port( [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] _ensure_no_port_binding_failure(port) [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise exception.PortBindingFailed(port_id=port['id']) [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. [ 761.005674] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] [ 761.005674] env[62503]: INFO nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Terminating instance [ 761.009257] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquiring lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.014041] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093617} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.014206] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.014383] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 761.014555] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.370941] env[62503]: DEBUG nova.network.neutron [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.373645] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.374133] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 761.377254] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.049s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.378824] env[62503]: INFO nova.compute.claims [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.465355] env[62503]: DEBUG nova.network.neutron [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.879515] env[62503]: DEBUG nova.compute.utils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.881208] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 761.881208] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.925041] env[62503]: DEBUG nova.policy [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cb34863fc794bc4b69f8d6873e1ed7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd1106bb5c01a4c599b8b5bff2802f5ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 761.967644] env[62503]: DEBUG oslo_concurrency.lockutils [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] Releasing lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.968257] env[62503]: DEBUG nova.compute.manager [req-d49119ba-faa7-4f05-9c49-865d8d3e29ce req-299760a1-c2c9-485e-aff7-38fa51f9304e service nova] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Received event network-vif-deleted-24c577a7-e25f-4538-a47d-16b593401630 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 761.969015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquired lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.969211] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.045765] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.046021] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.046276] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.046478] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.046644] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.046823] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.047044] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.047206] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.047375] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.047534] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.047704] env[62503]: DEBUG nova.virt.hardware [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 
tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.048556] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e48812-e96f-4d67-804f-39a368110929 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.056447] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203972d0-fae2-41d6-b923-4bbefd83886d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.070948] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.077257] env[62503]: DEBUG oslo.service.loopingcall [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.077492] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.077736] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c9d47e4-3377-4706-807e-db5b2a7ce920 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.095793] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.095793] env[62503]: value = "task-1387744" [ 762.095793] env[62503]: _type = "Task" [ 762.095793] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.103502] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387744, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.202342] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Successfully created port: c1062fca-9848-40d4-bc22-18b2ce47b6c7 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.384945] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 762.490909] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.593895] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.612522] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387744, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.747139] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec7bf2-adaa-4aa8-9eda-476aa75df4fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.755278] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4810b2-31a5-4968-9630-27b7d55aeb7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.785686] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb00cac-e704-4668-854e-a30f1cc680fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.792932] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e2f933-ff7e-4a3b-97e2-7da7eceb2481 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.806347] env[62503]: DEBUG nova.compute.provider_tree [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.100295] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Releasing lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.100625] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 763.100815] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.101265] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d2e14aa-b237-42e3-9f39-697ac8bd87a7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.112063] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387744, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.115337] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a05604d-4c08-46a8-8d83-3fcd4f195166 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.140273] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf6fb485-9672-42b5-ac88-bbf5e0941393 could not be found. [ 763.140516] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.141025] env[62503]: INFO nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Took 0.04 seconds to destroy the instance on the hypervisor. [ 763.141384] env[62503]: DEBUG oslo.service.loopingcall [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.142414] env[62503]: DEBUG nova.compute.manager [-] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 763.142414] env[62503]: DEBUG nova.network.neutron [-] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.145038] env[62503]: DEBUG nova.compute.manager [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Received event network-changed-c1062fca-9848-40d4-bc22-18b2ce47b6c7 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 763.145223] env[62503]: DEBUG nova.compute.manager [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Refreshing instance network info cache due to event network-changed-c1062fca-9848-40d4-bc22-18b2ce47b6c7. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 763.145432] env[62503]: DEBUG oslo_concurrency.lockutils [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] Acquiring lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.145574] env[62503]: DEBUG oslo_concurrency.lockutils [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] Acquired lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.145886] env[62503]: DEBUG nova.network.neutron [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Refreshing network info cache for port c1062fca-9848-40d4-bc22-18b2ce47b6c7 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.164148] env[62503]: DEBUG nova.network.neutron [-] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.309387] env[62503]: DEBUG nova.scheduler.client.report [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 763.376876] env[62503]: ERROR nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. [ 763.376876] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 763.376876] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.376876] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.376876] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.376876] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.376876] env[62503]: ERROR nova.compute.manager raise self.value [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.376876] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.376876] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.376876] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.377563] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.377563] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.377563] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. 
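Both of the build failures logged above end in the same guard: _ensure_no_port_binding_failure() in nova/network/neutron.py (line 294 in these tracebacks) raising nova.exception.PortBindingFailed for the port Neutron could not bind. The minimal sketch below illustrates that check in isolation; it is not Nova's verbatim code, and the attribute it inspects (assumed here to be the port's 'binding:vif_type' being reported as 'binding_failed') is an assumption drawn from the Neutron port-binding model, not something shown in this log.

    # Minimal sketch of the guard at the bottom of both tracebacks: after
    # updating a port, refuse to continue if Neutron reports the binding
    # as failed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding by setting the port's
        # 'binding:vif_type' attribute to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port from the traceback above:
    port = {'id': 'c1062fca-9848-40d4-bc22-18b2ce47b6c7',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)   # mirrors the message logged above
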
[ 763.377563] env[62503]: ERROR nova.compute.manager [ 763.377563] env[62503]: Traceback (most recent call last): [ 763.377563] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.377563] env[62503]: listener.cb(fileno) [ 763.377563] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.377563] env[62503]: result = function(*args, **kwargs) [ 763.377563] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.377563] env[62503]: return func(*args, **kwargs) [ 763.377563] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 763.377563] env[62503]: raise e [ 763.377563] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 763.377563] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 763.377563] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.377563] env[62503]: created_port_ids = self._update_ports_for_instance( [ 763.377563] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.377563] env[62503]: with excutils.save_and_reraise_exception(): [ 763.377563] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.377563] env[62503]: self.force_reraise() [ 763.377563] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.377563] env[62503]: raise self.value [ 763.377563] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.377563] env[62503]: updated_port = self._update_port( [ 763.377563] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.377563] env[62503]: _ensure_no_port_binding_failure(port) [ 763.377563] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.377563] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.378457] env[62503]: nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. [ 763.378457] env[62503]: Removing descriptor: 16 [ 763.400348] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 763.424126] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.424374] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.424528] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.424710] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.424856] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.425009] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.425232] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.425389] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.425554] env[62503]: DEBUG nova.virt.hardware [None 
req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.425714] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.425885] env[62503]: DEBUG nova.virt.hardware [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.426746] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f929178-e672-4640-9af2-0e06277f526f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.434345] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290a0799-2679-4eba-9455-154bd4a0d00d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.449013] env[62503]: ERROR nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. 
[ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Traceback (most recent call last): [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] yield resources [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.driver.spawn(context, instance, image_meta, [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] vm_ref = self.build_virtual_machine(instance, [ 763.449013] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] for vif in network_info: [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return self._sync_wrapper(fn, *args, **kwargs) [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.wait() [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self[:] = self._gt.wait() [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return self._exit_event.wait() [ 763.449458] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 763.449458] env[62503]: ERROR 
nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] current.throw(*self._exc) [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] result = function(*args, **kwargs) [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return func(*args, **kwargs) [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise e [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] nwinfo = self.network_api.allocate_for_instance( [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] created_port_ids = self._update_ports_for_instance( [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] with excutils.save_and_reraise_exception(): [ 763.449951] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.force_reraise() [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise self.value [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] updated_port = self._update_port( [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] _ensure_no_port_binding_failure(port) [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise exception.PortBindingFailed(port_id=port['id']) [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. [ 763.450416] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] [ 763.450416] env[62503]: INFO nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Terminating instance [ 763.451320] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquiring lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.612023] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387744, 'name': CreateVM_Task, 'duration_secs': 1.263777} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.612023] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.612023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.612023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.612023] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.612311] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8741c0d3-1b7a-425b-abfb-ee0d2ac3633b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.616654] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 763.616654] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528939ab-2226-4723-f627-cc32ec7c1328" [ 763.616654] env[62503]: _type = "Task" [ 763.616654] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.624579] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528939ab-2226-4723-f627-cc32ec7c1328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.662323] env[62503]: DEBUG nova.network.neutron [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.666585] env[62503]: DEBUG nova.network.neutron [-] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.763567] env[62503]: DEBUG nova.network.neutron [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.815839] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.817139] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 763.819011] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.146s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.126767] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528939ab-2226-4723-f627-cc32ec7c1328, 'name': SearchDatastore_Task, 'duration_secs': 0.009084} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.127091] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.127306] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.127537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.127880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.127880] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.128113] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feda8c63-2955-467a-914f-719b815c85be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.136027] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.136201] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.136879] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f1ac8c-c919-42f6-ad18-730a4d4ab3d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.141719] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 764.141719] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524c9bc9-ba8b-a4b3-2336-8023c9c1a7d3" [ 764.141719] env[62503]: _type = "Task" [ 764.141719] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.149191] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524c9bc9-ba8b-a4b3-2336-8023c9c1a7d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.168729] env[62503]: INFO nova.compute.manager [-] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Took 1.03 seconds to deallocate network for instance. [ 764.171478] env[62503]: DEBUG nova.compute.claims [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 764.171692] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.266636] env[62503]: DEBUG oslo_concurrency.lockutils [req-35979088-c68b-491a-b70e-b21094476c3d req-52e43ccd-e7da-4414-b0e0-33cb3339fbb5 service nova] Releasing lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.266779] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquired lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.266906] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.324230] env[62503]: DEBUG nova.compute.utils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 
tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.328424] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 764.328593] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.377102] env[62503]: DEBUG nova.policy [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca62582cb5f448b98d9dd165c44427cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6efe7bac8a64e05af00e39924f81214', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 764.599540] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51510de4-0e22-4429-86dd-68524c40f105 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.610896] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7dd557-d951-47a1-a784-ac27b79c78f7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.652105] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd33588c-00a0-4449-b160-84965564e1b2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.660628] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524c9bc9-ba8b-a4b3-2336-8023c9c1a7d3, 'name': SearchDatastore_Task, 'duration_secs': 0.0082} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.663197] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0250687-44bb-4fde-a005-433ca9f4ed70 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.666277] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185c2843-90a1-46b2-9f2d-c2dbc209add0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.672701] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 764.672701] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b3b831-fa7f-27a0-8d9c-8e9c327c4e9e" [ 764.672701] env[62503]: _type = "Task" [ 764.672701] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.680884] env[62503]: DEBUG nova.compute.provider_tree [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.690370] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b3b831-fa7f-27a0-8d9c-8e9c327c4e9e, 'name': SearchDatastore_Task, 'duration_secs': 0.00929} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.691250] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.691570] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.691854] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70a621b8-2f9a-4af8-a2af-f00fc3b35a07 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.699118] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 764.699118] env[62503]: value = "task-1387745" [ 764.699118] env[62503]: _type = "Task" [ 764.699118] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.707228] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.752017] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Successfully created port: 6db65cc5-b279-427b-812c-0c5fc0661b67 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.785209] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.829402] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 764.880977] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.187500] env[62503]: DEBUG nova.scheduler.client.report [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 765.192286] env[62503]: DEBUG nova.compute.manager [req-48b33371-ea2e-4160-a7c0-4509797a551c req-4f1cb707-1c72-47fb-a596-9cba0b8c3c37 service nova] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Received event network-vif-deleted-c1062fca-9848-40d4-bc22-18b2ce47b6c7 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 765.208584] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446583} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.209361] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.209575] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.209811] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bf36bda-9744-42d9-90f6-d85111078bdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.216119] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 765.216119] env[62503]: value = "task-1387746" [ 765.216119] env[62503]: _type = "Task" [ 765.216119] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.224259] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387746, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.382403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Releasing lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.382814] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 765.383012] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.383293] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82dfc868-2dab-4473-b9ab-5b4ba5e14f51 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.391869] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787d3b1c-a7f7-4af0-b6f8-f2b12be895f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.416266] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff56659a-18f8-44c5-ab10-872e636a9357 could not be found. [ 765.416497] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.416678] env[62503]: INFO nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Took 0.03 seconds to destroy the instance on the hypervisor. [ 765.416923] env[62503]: DEBUG oslo.service.loopingcall [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.417155] env[62503]: DEBUG nova.compute.manager [-] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 765.417246] env[62503]: DEBUG nova.network.neutron [-] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.433366] env[62503]: DEBUG nova.network.neutron [-] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.694688] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.875s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.695427] env[62503]: ERROR nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Traceback (most recent call last): [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.driver.spawn(context, instance, image_meta, [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] vm_ref = self.build_virtual_machine(instance, [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.695427] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] for vif in network_info: [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File 
"/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self._sync_wrapper(fn, *args, **kwargs) [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.wait() [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self[:] = self._gt.wait() [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self._exit_event.wait() [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] result = hub.switch() [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.695907] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return self.greenlet.switch() [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] result = function(*args, **kwargs) [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] return func(*args, **kwargs) [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise e [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] nwinfo = self.network_api.allocate_for_instance( [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] created_port_ids = self._update_ports_for_instance( [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] with excutils.save_and_reraise_exception(): [ 765.696672] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] self.force_reraise() [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise self.value [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] updated_port = self._update_port( [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] _ensure_no_port_binding_failure(port) [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] raise exception.PortBindingFailed(port_id=port['id']) [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] nova.exception.PortBindingFailed: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. [ 765.697111] env[62503]: ERROR nova.compute.manager [instance: be79632e-78ca-440a-92ef-d86a9f32693e] [ 765.697667] env[62503]: DEBUG nova.compute.utils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. 
{{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 765.697667] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.412s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.697667] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.697667] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 765.698366] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.476s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.699528] env[62503]: INFO nova.compute.claims [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.703068] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dea224-e7ae-4cce-a4df-eb7c18114544 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.708161] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Build of instance be79632e-78ca-440a-92ef-d86a9f32693e was re-scheduled: Binding failed for port 8c89f1b3-1e7b-4a8d-94c5-7b98ceba3b34, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 765.709350] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 765.709350] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquiring lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.709350] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Acquired lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.709350] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.713094] env[62503]: ERROR nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. 
[ 765.713094] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 765.713094] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.713094] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.713094] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.713094] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.713094] env[62503]: ERROR nova.compute.manager raise self.value [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.713094] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.713094] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.713094] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.713586] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.713586] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.713586] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. 
[ 765.713586] env[62503]: ERROR nova.compute.manager [ 765.713586] env[62503]: Traceback (most recent call last): [ 765.713586] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.713586] env[62503]: listener.cb(fileno) [ 765.713586] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.713586] env[62503]: result = function(*args, **kwargs) [ 765.713586] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.713586] env[62503]: return func(*args, **kwargs) [ 765.713586] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 765.713586] env[62503]: raise e [ 765.713586] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 765.713586] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 765.713586] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.713586] env[62503]: created_port_ids = self._update_ports_for_instance( [ 765.713586] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.713586] env[62503]: with excutils.save_and_reraise_exception(): [ 765.713586] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.713586] env[62503]: self.force_reraise() [ 765.713586] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.713586] env[62503]: raise self.value [ 765.713586] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.713586] env[62503]: updated_port = self._update_port( [ 765.713586] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.713586] env[62503]: _ensure_no_port_binding_failure(port) [ 765.713586] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.713586] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.714490] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. [ 765.714490] env[62503]: Removing descriptor: 16 [ 765.714564] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636e803a-a7d6-4152-a2cd-8683fdb940c4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.734730] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6a27dc-5588-46b1-a5d1-cfe53784e17c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.737251] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058189} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.737743] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.738806] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e625fff8-4a22-4a20-9e07-28852e800e6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.744423] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c97c15-a64c-4642-bee3-c3da235b02d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.763047] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.763047] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cda77411-f871-49ad-8dd2-d9f2383cf024 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.801691] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 765.801846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.807956] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 765.807956] env[62503]: value = "task-1387747" [ 765.807956] env[62503]: _type = "Task" [ 765.807956] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.815931] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387747, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.837611] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 765.864422] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=<?>,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:26:04Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.864666] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.864823] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.865014] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.865164] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.865311] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.865512] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.865671] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.865836] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.866007] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.866190] env[62503]: DEBUG nova.virt.hardware [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.867088] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e59801-d0c1-435c-a956-4764f7c4ed0f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.874510] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cd3ab9-0a3d-450f-b3cb-d226476a6cb9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.888113] env[62503]: ERROR nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. 
[ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Traceback (most recent call last): [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] yield resources [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.driver.spawn(context, instance, image_meta, [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] vm_ref = self.build_virtual_machine(instance, [ 765.888113] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] for vif in network_info: [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return self._sync_wrapper(fn, *args, **kwargs) [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.wait() [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self[:] = self._gt.wait() [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return self._exit_event.wait() [ 765.888524] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 765.888524] env[62503]: ERROR 
nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] current.throw(*self._exc) [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] result = function(*args, **kwargs) [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return func(*args, **kwargs) [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise e [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] nwinfo = self.network_api.allocate_for_instance( [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] created_port_ids = self._update_ports_for_instance( [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] with excutils.save_and_reraise_exception(): [ 765.889016] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.force_reraise() [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise self.value [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] updated_port = self._update_port( [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] _ensure_no_port_binding_failure(port) [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise exception.PortBindingFailed(port_id=port['id']) [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. [ 765.889467] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] [ 765.889467] env[62503]: INFO nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Terminating instance [ 765.890906] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquiring lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.891911] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquired lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.891911] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.935571] env[62503]: DEBUG nova.network.neutron [-] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.230800] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.317388] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387747, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.325899] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.407660] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.439037] env[62503]: INFO nova.compute.manager [-] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Took 1.02 seconds to deallocate network for instance. [ 766.440797] env[62503]: DEBUG nova.compute.claims [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 766.440990] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.452253] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.820031] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387747, 'name': ReconfigVM_Task, 'duration_secs': 0.715295} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.820133] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Reconfigured VM instance instance-0000002d to attach disk [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2/d4990c66-63d5-43b0-8187-2074c99ccde2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.820730] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ebd3845-cff7-426f-9c3b-6aedeeef0e9b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.827180] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 766.827180] env[62503]: value = "task-1387748" [ 766.827180] env[62503]: _type = "Task" [ 766.827180] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.830135] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Releasing lock "refresh_cache-be79632e-78ca-440a-92ef-d86a9f32693e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.830344] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 766.830516] env[62503]: DEBUG nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 766.830678] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 766.837481] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387748, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.846321] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.956699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Releasing lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.957131] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 766.957332] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.957823] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f3e2cc0-80ba-4a7c-95b2-e5583db6b958 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.967835] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d8a507-8852-491b-bff7-c02da4b26fdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.980104] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a99a5f-bfa1-4e92-87ce-8b8e2a2bc11f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.987222] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102f06dd-3f1d-462b-ab37-19f90c83a15b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.994940] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 529e6f8e-49b9-46a7-a09f-17238522f7bc could not be found. [ 766.995163] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.995354] env[62503]: INFO nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 766.995583] env[62503]: DEBUG oslo.service.loopingcall [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.996134] env[62503]: DEBUG nova.compute.manager [-] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 766.996272] env[62503]: DEBUG nova.network.neutron [-] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.021555] env[62503]: DEBUG nova.network.neutron [-] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.023330] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519b4b54-5207-4d49-8096-074a93c2537f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.031015] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5908bf1-8b5c-402c-a909-48d772a3ce72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.044264] env[62503]: DEBUG nova.compute.provider_tree [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.218067] env[62503]: DEBUG nova.compute.manager [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Received event network-changed-6db65cc5-b279-427b-812c-0c5fc0661b67 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 767.218287] env[62503]: DEBUG nova.compute.manager [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Refreshing instance network info cache due to event network-changed-6db65cc5-b279-427b-812c-0c5fc0661b67. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 767.218508] env[62503]: DEBUG oslo_concurrency.lockutils [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] Acquiring lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.218655] env[62503]: DEBUG oslo_concurrency.lockutils [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] Acquired lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.218883] env[62503]: DEBUG nova.network.neutron [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Refreshing network info cache for port 6db65cc5-b279-427b-812c-0c5fc0661b67 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.338032] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387748, 'name': Rename_Task, 'duration_secs': 0.151444} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.338334] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.339027] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5dbb9b2-a391-4a1f-b04d-fb9436af243b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.344940] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 767.344940] env[62503]: value = "task-1387749" [ 767.344940] env[62503]: _type = "Task" [ 767.344940] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.348100] env[62503]: DEBUG nova.network.neutron [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.355631] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.527138] env[62503]: DEBUG nova.network.neutron [-] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.546595] env[62503]: DEBUG nova.scheduler.client.report [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 767.736913] env[62503]: DEBUG nova.network.neutron [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.798416] env[62503]: DEBUG nova.network.neutron [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.852109] env[62503]: INFO nova.compute.manager [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] [instance: be79632e-78ca-440a-92ef-d86a9f32693e] Took 1.02 seconds to deallocate network for instance. [ 767.857630] env[62503]: DEBUG oslo_vmware.api [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387749, 'name': PowerOnVM_Task, 'duration_secs': 0.425258} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.858123] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.858333] env[62503]: DEBUG nova.compute.manager [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 767.859148] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb876620-0477-42d5-b3b1-181df427eaff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.030073] env[62503]: INFO nova.compute.manager [-] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Took 1.03 seconds to deallocate network for instance. 
[ 768.032252] env[62503]: DEBUG nova.compute.claims [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 768.032434] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.051352] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.051829] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 768.054174] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.743s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.301242] env[62503]: DEBUG oslo_concurrency.lockutils [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] Releasing lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.301573] env[62503]: DEBUG nova.compute.manager [req-a33a28c3-1dad-41c9-ad5c-30df33df9e72 req-25fc9e1d-b5d5-46d0-a246-77a8d910e3e1 service nova] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Received event network-vif-deleted-6db65cc5-b279-427b-812c-0c5fc0661b67 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 768.375739] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.562027] env[62503]: DEBUG nova.compute.utils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.563256] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 
ad7badc9-cb11-4532-885a-28fb3d4de9ef] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 768.563449] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.603738] env[62503]: DEBUG nova.policy [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '691809da402d4a29b085cfe3b22306b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a833cd3315d0487cb3badd7b0d330a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 768.709311] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "d4990c66-63d5-43b0-8187-2074c99ccde2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.709913] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.710150] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "d4990c66-63d5-43b0-8187-2074c99ccde2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.711701] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.711701] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.713065] env[62503]: INFO nova.compute.manager [None 
req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Terminating instance [ 768.714688] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "refresh_cache-d4990c66-63d5-43b0-8187-2074c99ccde2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.714841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "refresh_cache-d4990c66-63d5-43b0-8187-2074c99ccde2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.715503] env[62503]: DEBUG nova.network.neutron [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.831061] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b408c6e1-6db8-428a-8fab-a6798019c014 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.841334] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687969b7-2142-4510-9d6e-0e2b16a19210 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.878734] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f522844-c757-4dbe-b514-37c391d1d094 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.886199] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902f83c3-7145-482d-af1a-6160477a4cc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.899561] env[62503]: DEBUG nova.compute.provider_tree [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.901728] env[62503]: INFO nova.scheduler.client.report [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Deleted allocations for instance be79632e-78ca-440a-92ef-d86a9f32693e [ 768.909612] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Successfully created port: de38585f-ec61-4735-b9c7-0dcb8ebfaecc {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.066372] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 
tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 769.231485] env[62503]: DEBUG nova.network.neutron [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.279825] env[62503]: DEBUG nova.network.neutron [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.411180] env[62503]: DEBUG nova.scheduler.client.report [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 769.415215] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d48c1997-54b2-410a-b1fd-f52e7748bdfd tempest-ServerGroupTestJSON-1883967840 tempest-ServerGroupTestJSON-1883967840-project-member] Lock "be79632e-78ca-440a-92ef-d86a9f32693e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 128.925s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.660051] env[62503]: DEBUG nova.compute.manager [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Received event network-changed-de38585f-ec61-4735-b9c7-0dcb8ebfaecc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 769.660051] env[62503]: DEBUG nova.compute.manager [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Refreshing instance network info cache due to event network-changed-de38585f-ec61-4735-b9c7-0dcb8ebfaecc. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 769.660300] env[62503]: DEBUG oslo_concurrency.lockutils [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] Acquiring lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.660447] env[62503]: DEBUG oslo_concurrency.lockutils [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] Acquired lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.660609] env[62503]: DEBUG nova.network.neutron [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Refreshing network info cache for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.783221] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "refresh_cache-d4990c66-63d5-43b0-8187-2074c99ccde2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.783738] env[62503]: DEBUG nova.compute.manager [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 769.783949] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.784867] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df0c5ef-d584-4287-86c4-ebe92961010b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.793290] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 769.793526] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eeb9e0c8-ded1-41e8-8e0a-8c5f5fcf2230 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.799114] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 769.799114] env[62503]: value = "task-1387750" [ 769.799114] env[62503]: _type = "Task" [ 769.799114] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.809352] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.874879] env[62503]: ERROR nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. [ 769.874879] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 769.874879] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.874879] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.874879] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.874879] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.874879] env[62503]: ERROR nova.compute.manager raise self.value [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.874879] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 769.874879] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.874879] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 769.875562] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.875562] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 769.875562] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. 
[ 769.875562] env[62503]: ERROR nova.compute.manager [ 769.875562] env[62503]: Traceback (most recent call last): [ 769.875562] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 769.875562] env[62503]: listener.cb(fileno) [ 769.875562] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.875562] env[62503]: result = function(*args, **kwargs) [ 769.875562] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.875562] env[62503]: return func(*args, **kwargs) [ 769.875562] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 769.875562] env[62503]: raise e [ 769.875562] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 769.875562] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 769.875562] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.875562] env[62503]: created_port_ids = self._update_ports_for_instance( [ 769.875562] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.875562] env[62503]: with excutils.save_and_reraise_exception(): [ 769.875562] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.875562] env[62503]: self.force_reraise() [ 769.875562] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.875562] env[62503]: raise self.value [ 769.875562] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.875562] env[62503]: updated_port = self._update_port( [ 769.875562] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.875562] env[62503]: _ensure_no_port_binding_failure(port) [ 769.875562] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.875562] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 769.876632] env[62503]: nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. [ 769.876632] env[62503]: Removing descriptor: 16 [ 769.918380] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.864s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.918631] env[62503]: ERROR nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. 
[ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Traceback (most recent call last): [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.driver.spawn(context, instance, image_meta, [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] vm_ref = self.build_virtual_machine(instance, [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.918631] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] for vif in network_info: [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self._sync_wrapper(fn, *args, **kwargs) [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.wait() [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self[:] = self._gt.wait() [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self._exit_event.wait() [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] result = hub.switch() [ 769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
769.919161] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return self.greenlet.switch() [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] result = function(*args, **kwargs) [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] return func(*args, **kwargs) [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise e [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] nwinfo = self.network_api.allocate_for_instance( [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] created_port_ids = self._update_ports_for_instance( [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] with excutils.save_and_reraise_exception(): [ 769.919799] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] self.force_reraise() [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise self.value [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] updated_port = self._update_port( [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] _ensure_no_port_binding_failure(port) [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] raise exception.PortBindingFailed(port_id=port['id']) [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] nova.exception.PortBindingFailed: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. [ 769.920310] env[62503]: ERROR nova.compute.manager [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] [ 769.920666] env[62503]: DEBUG nova.compute.utils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 769.922686] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.168s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.922686] env[62503]: INFO nova.compute.claims [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.924807] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 769.927432] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Build of instance 47d67edd-0860-49a6-ab7e-0511cffb82ae was re-scheduled: Binding failed for port ba51a328-d1e0-4749-a304-d9a9c54c44d8, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 769.928993] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 769.928993] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.928993] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.928993] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.076137] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 770.101505] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.101753] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.101911] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.102119] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.102268] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.102414] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.102617] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.102782] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.102957] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Got 1 possible topologies 
{{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.103133] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.103303] env[62503]: DEBUG nova.virt.hardware [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.104469] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8477239a-18c8-4954-98c0-61ec2a8f3c42 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.113011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce03db9-549b-47d0-b9ab-56be0c4489e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.127206] env[62503]: ERROR nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Traceback (most recent call last): [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] yield resources [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.driver.spawn(context, instance, image_meta, [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] vm_ref = self.build_virtual_machine(instance, [ 770.127206] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: 
ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] for vif in network_info: [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return self._sync_wrapper(fn, *args, **kwargs) [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.wait() [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self[:] = self._gt.wait() [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return self._exit_event.wait() [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 770.127619] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] current.throw(*self._exc) [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] result = function(*args, **kwargs) [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return func(*args, **kwargs) [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise e [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] nwinfo = self.network_api.allocate_for_instance( [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] created_port_ids = self._update_ports_for_instance( [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] with excutils.save_and_reraise_exception(): [ 770.128063] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.force_reraise() [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise self.value [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] updated_port = self._update_port( [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] _ensure_no_port_binding_failure(port) [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise exception.PortBindingFailed(port_id=port['id']) [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. [ 770.128938] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] [ 770.128938] env[62503]: INFO nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Terminating instance [ 770.130826] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.177247] env[62503]: DEBUG nova.network.neutron [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.260633] env[62503]: DEBUG nova.network.neutron [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.308573] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387750, 'name': PowerOffVM_Task, 'duration_secs': 0.193169} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.308913] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 770.309127] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 770.309373] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85b58fc4-846f-478c-8662-ed1bdf8254e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.334047] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 770.334228] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 770.334419] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleting the datastore file [datastore2] d4990c66-63d5-43b0-8187-2074c99ccde2 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.334662] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92c00247-a65c-477a-94d5-bbadd3e4f2aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.340769] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 770.340769] env[62503]: value = "task-1387752" [ 770.340769] env[62503]: _type = "Task" [ 770.340769] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.348531] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387752, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.455292] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.457205] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.557372] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.763084] env[62503]: DEBUG oslo_concurrency.lockutils [req-ad5b2780-a464-4525-82ed-828de6bda111 req-ad74da28-e4b0-4f95-a637-96c872445638 service nova] Releasing lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.764955] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.764955] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.850929] env[62503]: DEBUG oslo_vmware.api [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109006} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.854037] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.854037] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 770.854037] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.854037] env[62503]: INFO nova.compute.manager [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Took 1.07 seconds to destroy the instance on the hypervisor. [ 770.854037] env[62503]: DEBUG oslo.service.loopingcall [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.854037] env[62503]: DEBUG nova.compute.manager [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 770.854294] env[62503]: DEBUG nova.network.neutron [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.866910] env[62503]: DEBUG nova.network.neutron [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.063222] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-47d67edd-0860-49a6-ab7e-0511cffb82ae" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.063467] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 771.063654] env[62503]: DEBUG nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 771.063824] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.085510] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.230352] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a82905-7e66-4ff4-a6e6-074d1adda0ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.237779] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455f1321-ff3c-4d24-9079-9ab7ffcf4ec9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.271923] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543fa0e3-5ae0-48ee-88fc-dae399ced0ad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.282025] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41371acd-ac98-4adb-a491-67830a287455 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.292707] env[62503]: DEBUG nova.compute.provider_tree [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.294543] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.373019] env[62503]: DEBUG nova.network.neutron [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.449141] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.587997] env[62503]: DEBUG nova.network.neutron [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.685682] env[62503]: DEBUG nova.compute.manager [req-98879397-9cda-45c5-b9b9-94fa23ddd713 req-c9367562-52fb-43f0-ade1-da015c9f66a2 service nova] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Received event network-vif-deleted-de38585f-ec61-4735-b9c7-0dcb8ebfaecc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 771.798071] env[62503]: DEBUG nova.scheduler.client.report [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 771.876049] env[62503]: INFO nova.compute.manager [-] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Took 1.02 seconds to deallocate network for instance. [ 771.953276] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.953714] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 771.953904] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.954216] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a78827a-1a41-4e43-96f2-e1326bf34803 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.963231] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6704afe7-b423-4e74-89ce-015d18fe61dd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.986622] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad7badc9-cb11-4532-885a-28fb3d4de9ef could not be found. [ 771.986854] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.987046] env[62503]: INFO nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Took 0.03 seconds to destroy the instance on the hypervisor. [ 771.987297] env[62503]: DEBUG oslo.service.loopingcall [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.987516] env[62503]: DEBUG nova.compute.manager [-] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 771.987610] env[62503]: DEBUG nova.network.neutron [-] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.003893] env[62503]: DEBUG nova.network.neutron [-] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.094695] env[62503]: INFO nova.compute.manager [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 47d67edd-0860-49a6-ab7e-0511cffb82ae] Took 1.03 seconds to deallocate network for instance. 
[ 772.303311] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.303829] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 772.306384] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.278s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.381805] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.507536] env[62503]: DEBUG nova.network.neutron [-] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.811857] env[62503]: DEBUG nova.compute.utils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.817441] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 772.817441] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.874659] env[62503]: DEBUG nova.policy [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13fbe651215a435384443e46e225ebaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5f8c12d03a0446988d5335c00cee0ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 773.009749] env[62503]: INFO nova.compute.manager [-] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Took 1.02 seconds to deallocate network for instance. [ 773.012197] env[62503]: DEBUG nova.compute.claims [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 773.012416] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.094018] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c8d66c-b9bc-4f57-ade4-57b3416f78a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.100842] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95891fe-fcac-4f56-93b1-73b85222b1ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.135584] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1863159-eeaf-4a45-a39b-5938ca725643 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.139069] env[62503]: INFO nova.scheduler.client.report [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance 47d67edd-0860-49a6-ab7e-0511cffb82ae [ 773.150119] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ce04fe-9b0d-4f21-b9cb-e0300bbad344 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.163953] env[62503]: DEBUG nova.compute.provider_tree [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 
tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.165953] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Successfully created port: e7c06575-72b9-4935-9724-8bb24022dfa9 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.316992] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 773.481935] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Successfully created port: 2f14b37c-f48e-4d3e-a95f-7851422d7278 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.648217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf654c76-1dff-4311-afb8-80d00a5369c0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "47d67edd-0860-49a6-ab7e-0511cffb82ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.453s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.671203] env[62503]: DEBUG nova.scheduler.client.report [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 773.840516] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Successfully created port: 7b0c9ed2-e4ca-4200-8d5a-8b63120efc20 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.150209] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 774.176104] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.179068] env[62503]: ERROR nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Traceback (most recent call last): [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.driver.spawn(context, instance, image_meta, [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] vm_ref = self.build_virtual_machine(instance, [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.179068] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] for vif in network_info: [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return self._sync_wrapper(fn, *args, **kwargs) [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.wait() [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 774.180095] 
env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self[:] = self._gt.wait() [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return self._exit_event.wait() [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] current.throw(*self._exc) [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.180095] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] result = function(*args, **kwargs) [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] return func(*args, **kwargs) [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise e [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] nwinfo = self.network_api.allocate_for_instance( [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] created_port_ids = self._update_ports_for_instance( [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] with excutils.save_and_reraise_exception(): [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] self.force_reraise() [ 774.180555] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise self.value [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] updated_port = self._update_port( [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] _ensure_no_port_binding_failure(port) [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] raise exception.PortBindingFailed(port_id=port['id']) [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] nova.exception.PortBindingFailed: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. [ 774.180960] env[62503]: ERROR nova.compute.manager [instance: 48d9b18d-04b5-44e4-809e-383819d39418] [ 774.187860] env[62503]: DEBUG nova.compute.utils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 774.189973] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.649s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.192041] env[62503]: INFO nova.compute.claims [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.196251] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Build of instance 48d9b18d-04b5-44e4-809e-383819d39418 was re-scheduled: Binding failed for port 1f509a03-3771-46fe-a0f9-4aed7d32e203, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 774.196730] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 774.200616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquiring lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.200844] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Acquired lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.200975] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.330554] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 774.354861] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 774.355135] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 774.355299] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.355545] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 774.356084] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.356393] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 774.356627] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 774.356868] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 774.357136] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 
tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 774.357371] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 774.357599] env[62503]: DEBUG nova.virt.hardware [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 774.358541] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37584485-35d7-4e59-9847-65aa65e917b2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.368070] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8113ccf9-4842-4602-9462-48c70f80b551 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.681356] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.730508] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.843839] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.280690] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "c5f2cc73-6bcd-4422-890b-3299d4cf4534" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.280879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c5f2cc73-6bcd-4422-890b-3299d4cf4534" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.348939] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Releasing lock "refresh_cache-48d9b18d-04b5-44e4-809e-383819d39418" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.349635] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 775.350141] env[62503]: DEBUG nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 775.350141] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.369966] env[62503]: DEBUG nova.compute.manager [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Received event network-changed-e7c06575-72b9-4935-9724-8bb24022dfa9 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 775.369966] env[62503]: DEBUG nova.compute.manager [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Refreshing instance network info cache due to event network-changed-e7c06575-72b9-4935-9724-8bb24022dfa9. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 775.370274] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] Acquiring lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.370354] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] Acquired lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.371073] env[62503]: DEBUG nova.network.neutron [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Refreshing network info cache for port e7c06575-72b9-4935-9724-8bb24022dfa9 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 775.377177] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.547101] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2aa242-5741-4e64-9646-a0c6c9fcb631 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.554839] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cea1692-8d5a-4535-92bb-121e27f093bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.586882] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df859087-638f-4e29-9760-e29eb33cd3b3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.595927] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9c9d5f-8bd4-4af8-846c-c0302fdab3a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.609170] env[62503]: DEBUG nova.compute.provider_tree [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.631307] env[62503]: ERROR nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. 
[ 775.631307] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 775.631307] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.631307] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.631307] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.631307] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.631307] env[62503]: ERROR nova.compute.manager raise self.value [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.631307] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 775.631307] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.631307] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 775.631837] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.631837] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 775.631837] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. 
[ 775.631837] env[62503]: ERROR nova.compute.manager [ 775.631837] env[62503]: Traceback (most recent call last): [ 775.631837] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 775.631837] env[62503]: listener.cb(fileno) [ 775.631837] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.631837] env[62503]: result = function(*args, **kwargs) [ 775.631837] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 775.631837] env[62503]: return func(*args, **kwargs) [ 775.631837] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 775.631837] env[62503]: raise e [ 775.631837] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 775.631837] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 775.631837] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.631837] env[62503]: created_port_ids = self._update_ports_for_instance( [ 775.631837] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.631837] env[62503]: with excutils.save_and_reraise_exception(): [ 775.631837] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.631837] env[62503]: self.force_reraise() [ 775.631837] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.631837] env[62503]: raise self.value [ 775.631837] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.631837] env[62503]: updated_port = self._update_port( [ 775.631837] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.631837] env[62503]: _ensure_no_port_binding_failure(port) [ 775.631837] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.631837] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 775.632767] env[62503]: nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. [ 775.632767] env[62503]: Removing descriptor: 16 [ 775.632767] env[62503]: ERROR nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. 
[ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Traceback (most recent call last): [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] yield resources [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.driver.spawn(context, instance, image_meta, [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 775.632767] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] vm_ref = self.build_virtual_machine(instance, [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] for vif in network_info: [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self._sync_wrapper(fn, *args, **kwargs) [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.wait() [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self[:] = self._gt.wait() [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self._exit_event.wait() [ 775.633177] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 775.633609] env[62503]: ERROR 
nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] result = hub.switch() [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self.greenlet.switch() [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] result = function(*args, **kwargs) [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return func(*args, **kwargs) [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise e [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] nwinfo = self.network_api.allocate_for_instance( [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.633609] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] created_port_ids = self._update_ports_for_instance( [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] with excutils.save_and_reraise_exception(): [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.force_reraise() [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise self.value [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] updated_port = self._update_port( [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.634202] 
env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] _ensure_no_port_binding_failure(port) [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.634202] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise exception.PortBindingFailed(port_id=port['id']) [ 775.634581] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. [ 775.634581] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] [ 775.634581] env[62503]: INFO nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Terminating instance [ 775.634825] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.885888] env[62503]: DEBUG nova.network.neutron [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.927220] env[62503]: DEBUG nova.network.neutron [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.021089] env[62503]: DEBUG nova.network.neutron [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.115171] env[62503]: DEBUG nova.scheduler.client.report [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 776.386509] env[62503]: INFO nova.compute.manager [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] [instance: 48d9b18d-04b5-44e4-809e-383819d39418] Took 1.04 seconds to deallocate network for instance. [ 776.441585] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquiring lock "1e355e38-60c6-4e7f-beb4-160c4527ec51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.441585] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "1e355e38-60c6-4e7f-beb4-160c4527ec51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.524741] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c3c64eb-2ec2-457e-a33d-9e959246cf96 req-cdb79318-5aa8-4817-b647-7aa65e102280 service nova] Releasing lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.525175] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.525365] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.620165] env[62503]: DEBUG 
oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.620677] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 776.623169] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.830s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.062509] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.130024] env[62503]: DEBUG nova.compute.utils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.133237] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 777.133401] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 777.151100] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.187333] env[62503]: DEBUG nova.policy [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d22ef1366854b6cad3923e38ca93241', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd404225d6f9c46148e0b7080ec1eee99', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 777.396982] env[62503]: DEBUG nova.compute.manager [req-84675291-186c-4248-b919-b45c34546a27 req-7c8d646b-a367-4d46-abe7-53eee929ae67 service nova] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Received event network-vif-deleted-e7c06575-72b9-4935-9724-8bb24022dfa9 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 777.413331] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9beef85f-8e1e-4d27-9995-c48d7b8ddaa6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.421606] env[62503]: INFO nova.scheduler.client.report [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Deleted allocations for instance 48d9b18d-04b5-44e4-809e-383819d39418 [ 777.431020] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7226816a-ec71-4461-b9cc-50f4daaf2c3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.464312] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f83554-87fb-40c4-857e-eeae20abe718 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.472372] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecbbd2b-18fa-4860-a60a-1736bf01f443 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.486414] env[62503]: DEBUG nova.compute.provider_tree [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed in 
ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.494203] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Successfully created port: 884f6499-41ed-4dce-9197-75e5aaf3e2ce {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 777.633759] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 777.653972] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Releasing lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.656019] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 777.656019] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 777.656019] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01fde429-3e7b-41f1-8351-bafc75c8e174 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.663965] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577237c1-3a2f-492c-9e52-cabb7e3a08f1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.687674] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 32d4fda5-6d30-4416-b187-cf5548cb23bf could not be found. [ 777.687918] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 777.688117] env[62503]: INFO nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 777.688367] env[62503]: DEBUG oslo.service.loopingcall [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.688574] env[62503]: DEBUG nova.compute.manager [-] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 777.688665] env[62503]: DEBUG nova.network.neutron [-] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.722976] env[62503]: DEBUG nova.network.neutron [-] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.932267] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35b849ab-bc6b-4b84-91df-53a0636c028a tempest-ServerMetadataNegativeTestJSON-362530580 tempest-ServerMetadataNegativeTestJSON-362530580-project-member] Lock "48d9b18d-04b5-44e4-809e-383819d39418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.723s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.993182] env[62503]: DEBUG nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 778.272695] env[62503]: DEBUG nova.compute.manager [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Received event network-changed-884f6499-41ed-4dce-9197-75e5aaf3e2ce {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 778.272935] env[62503]: DEBUG nova.compute.manager [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Refreshing instance network info cache due to event network-changed-884f6499-41ed-4dce-9197-75e5aaf3e2ce. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 778.272986] env[62503]: DEBUG oslo_concurrency.lockutils [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] Acquiring lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.273122] env[62503]: DEBUG oslo_concurrency.lockutils [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] Acquired lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.273290] env[62503]: DEBUG nova.network.neutron [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Refreshing network info cache for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.425519] env[62503]: ERROR nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. [ 778.425519] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 778.425519] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.425519] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.425519] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.425519] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.425519] env[62503]: ERROR nova.compute.manager raise self.value [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.425519] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 778.425519] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.425519] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 778.426127] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.426127] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 778.426127] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. [ 778.426127] env[62503]: ERROR nova.compute.manager [ 778.426127] env[62503]: Traceback (most recent call last): [ 778.426127] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 778.426127] env[62503]: listener.cb(fileno) [ 778.426127] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.426127] env[62503]: result = function(*args, **kwargs) [ 778.426127] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.426127] env[62503]: return func(*args, **kwargs) [ 778.426127] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 778.426127] env[62503]: raise e [ 778.426127] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 778.426127] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 778.426127] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.426127] env[62503]: created_port_ids = self._update_ports_for_instance( [ 778.426127] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.426127] env[62503]: with excutils.save_and_reraise_exception(): [ 778.426127] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.426127] env[62503]: self.force_reraise() [ 778.426127] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.426127] env[62503]: raise self.value [ 778.426127] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.426127] env[62503]: updated_port = self._update_port( [ 778.426127] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.426127] env[62503]: _ensure_no_port_binding_failure(port) [ 778.426127] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.426127] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 778.427052] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. [ 778.427052] env[62503]: Removing descriptor: 14 [ 778.435354] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 778.498027] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.875s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.498733] env[62503]: ERROR nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] Traceback (most recent call last): [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.driver.spawn(context, instance, image_meta, [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] vm_ref = self.build_virtual_machine(instance, [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.498733] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] for vif in network_info: [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return self._sync_wrapper(fn, *args, **kwargs) [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.wait() [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.499904] env[62503]: ERROR 
nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self[:] = self._gt.wait() [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return self._exit_event.wait() [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] current.throw(*self._exc) [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.499904] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] result = function(*args, **kwargs) [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] return func(*args, **kwargs) [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise e [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] nwinfo = self.network_api.allocate_for_instance( [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] created_port_ids = self._update_ports_for_instance( [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] with excutils.save_and_reraise_exception(): [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] self.force_reraise() [ 778.500586] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise self.value [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] updated_port = self._update_port( [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] _ensure_no_port_binding_failure(port) [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] raise exception.PortBindingFailed(port_id=port['id']) [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] nova.exception.PortBindingFailed: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. [ 778.501160] env[62503]: ERROR nova.compute.manager [instance: b9259ced-344a-42e5-835d-3713631a68c7] [ 778.501160] env[62503]: DEBUG nova.compute.utils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 778.501641] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.329s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.503596] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Build of instance b9259ced-344a-42e5-835d-3713631a68c7 was re-scheduled: Binding failed for port cb70e532-2d98-4792-8aa5-ace14414265b, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 778.503936] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 778.504180] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.504326] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.504483] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.624744] env[62503]: DEBUG nova.network.neutron [-] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.643251] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 778.669217] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 778.669217] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 778.669217] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.669394] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 778.669394] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.669394] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 778.669394] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 778.669394] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 778.669578] env[62503]: DEBUG nova.virt.hardware [None 
req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 778.669578] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 778.669578] env[62503]: DEBUG nova.virt.hardware [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 778.670736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df807fe-dcd6-4523-94e8-1deb3745a3ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.679086] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b07319-01f5-436e-9d3a-6dd4b8a54b35 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.693843] env[62503]: ERROR nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. 
[ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Traceback (most recent call last): [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] yield resources [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.driver.spawn(context, instance, image_meta, [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] vm_ref = self.build_virtual_machine(instance, [ 778.693843] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] for vif in network_info: [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return self._sync_wrapper(fn, *args, **kwargs) [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.wait() [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self[:] = self._gt.wait() [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return self._exit_event.wait() [ 778.694312] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 778.694312] env[62503]: ERROR 
nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] current.throw(*self._exc) [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] result = function(*args, **kwargs) [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return func(*args, **kwargs) [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise e [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] nwinfo = self.network_api.allocate_for_instance( [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] created_port_ids = self._update_ports_for_instance( [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] with excutils.save_and_reraise_exception(): [ 778.694765] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.force_reraise() [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise self.value [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] updated_port = self._update_port( [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] _ensure_no_port_binding_failure(port) [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise exception.PortBindingFailed(port_id=port['id']) [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] nova.exception.PortBindingFailed: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. [ 778.695221] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] [ 778.697377] env[62503]: INFO nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Terminating instance [ 778.698145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.798906] env[62503]: DEBUG nova.network.neutron [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.885280] env[62503]: DEBUG nova.network.neutron [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.956087] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.028344] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.127242] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.129186] env[62503]: INFO nova.compute.manager [-] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Took 1.44 seconds to deallocate network for instance. 
[ 779.130895] env[62503]: DEBUG nova.compute.claims [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 779.131201] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.297750] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e37c6fc-00c3-49f0-9d64-74f00ac8394e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.304970] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b225e8b6-5d6b-468d-a49d-d9cff7295991 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.335136] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483fca96-2527-4179-8a98-840c0ac55376 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.341690] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d6adc4-c26d-439c-8334-c052759392fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.354028] env[62503]: DEBUG nova.compute.provider_tree [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.390312] env[62503]: DEBUG oslo_concurrency.lockutils [req-f65bf378-7f76-482b-93f4-d307d0606744 req-c66e0dc1-a936-4f37-99b9-dd7c1a1aabd9 service nova] Releasing lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.390430] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.391440] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.630618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock 
"refresh_cache-b9259ced-344a-42e5-835d-3713631a68c7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.630863] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 779.631056] env[62503]: DEBUG nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 779.631232] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.647157] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.859714] env[62503]: DEBUG nova.scheduler.client.report [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 779.913554] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.058213] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.151106] env[62503]: DEBUG nova.network.neutron [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.316764] env[62503]: DEBUG nova.compute.manager [req-a87a11f9-2581-4ae6-aeec-d5556436dd19 req-2952e771-ae21-448f-b451-25aad6f8323d service nova] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Received event network-vif-deleted-884f6499-41ed-4dce-9197-75e5aaf3e2ce {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 780.364599] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.864s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.365249] env[62503]: ERROR nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. 
[ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Traceback (most recent call last): [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.driver.spawn(context, instance, image_meta, [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] vm_ref = self.build_virtual_machine(instance, [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.365249] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] for vif in network_info: [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return self._sync_wrapper(fn, *args, **kwargs) [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.wait() [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self[:] = self._gt.wait() [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return self._exit_event.wait() [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] current.throw(*self._exc) [ 780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
780.365708] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] result = function(*args, **kwargs) [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] return func(*args, **kwargs) [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise e [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] nwinfo = self.network_api.allocate_for_instance( [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] created_port_ids = self._update_ports_for_instance( [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] with excutils.save_and_reraise_exception(): [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] self.force_reraise() [ 780.366104] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise self.value [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] updated_port = self._update_port( [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] _ensure_no_port_binding_failure(port) [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] raise exception.PortBindingFailed(port_id=port['id']) [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] nova.exception.PortBindingFailed: Binding failed for 
port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. [ 780.366478] env[62503]: ERROR nova.compute.manager [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] [ 780.366478] env[62503]: DEBUG nova.compute.utils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 780.367218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.565s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.368914] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Build of instance cf6fb485-9672-42b5-ac88-bbf5e0941393 was re-scheduled: Binding failed for port 24c577a7-e25f-4538-a47d-16b593401630, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 780.369517] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 780.369517] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquiring lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.369657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Acquired lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.369858] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.560730] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.561208] env[62503]: DEBUG 
nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 780.561441] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.561744] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2765dff-96b4-404f-93be-1301cf894025 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.571058] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b304299a-2224-407a-b28c-631bd03aabb2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.594117] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a4b600d2-b411-4957-92cb-7e8e462fde1d could not be found. [ 780.594117] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.594117] env[62503]: INFO nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 780.594247] env[62503]: DEBUG oslo.service.loopingcall [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.594387] env[62503]: DEBUG nova.compute.manager [-] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 780.594479] env[62503]: DEBUG nova.network.neutron [-] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.607235] env[62503]: DEBUG nova.network.neutron [-] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.654219] env[62503]: INFO nova.compute.manager [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: b9259ced-344a-42e5-835d-3713631a68c7] Took 1.02 seconds to deallocate network for instance. [ 780.890745] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.966445] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.110220] env[62503]: DEBUG nova.network.neutron [-] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.399780] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 141d7d04-0267-4e15-90ed-112ac8fb8c9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 781.400089] env[62503]: WARNING nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance d4990c66-63d5-43b0-8187-2074c99ccde2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 781.469193] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Releasing lock "refresh_cache-cf6fb485-9672-42b5-ac88-bbf5e0941393" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.469418] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 781.469598] env[62503]: DEBUG nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 781.469759] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.485311] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.613357] env[62503]: INFO nova.compute.manager [-] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Took 1.02 seconds to deallocate network for instance. [ 781.616218] env[62503]: DEBUG nova.compute.claims [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 781.616373] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.683892] env[62503]: INFO nova.scheduler.client.report [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Deleted allocations for instance b9259ced-344a-42e5-835d-3713631a68c7 [ 781.903524] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b9259ced-344a-42e5-835d-3713631a68c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 781.988149] env[62503]: DEBUG nova.network.neutron [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.192139] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5f6b79ba-d091-4f79-8bf5-714be49e7af9 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "b9259ced-344a-42e5-835d-3713631a68c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.192s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.408266] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance cf6fb485-9672-42b5-ac88-bbf5e0941393 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.408266] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ff56659a-18f8-44c5-ab10-872e636a9357 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.408266] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 529e6f8e-49b9-46a7-a09f-17238522f7bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.408266] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ad7badc9-cb11-4532-885a-28fb3d4de9ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.408690] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 32d4fda5-6d30-4416-b187-cf5548cb23bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.408690] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance a4b600d2-b411-4957-92cb-7e8e462fde1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 782.490325] env[62503]: INFO nova.compute.manager [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] [instance: cf6fb485-9672-42b5-ac88-bbf5e0941393] Took 1.02 seconds to deallocate network for instance. [ 782.694797] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 782.910679] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 9eee91c6-a949-453b-8ccd-ba986251ed27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.220782] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.415030] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance f6f17748-815c-417f-bce6-3bc97f23b637 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.519434] env[62503]: INFO nova.scheduler.client.report [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Deleted allocations for instance cf6fb485-9672-42b5-ac88-bbf5e0941393 [ 783.919181] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 1251e59f-9c01-4115-8400-40aacedd97e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.031049] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7b8e1dad-5e5d-4734-b8ed-b199db0d06eb tempest-ServersNegativeTestMultiTenantJSON-701390068 tempest-ServersNegativeTestMultiTenantJSON-701390068-project-member] Lock "cf6fb485-9672-42b5-ac88-bbf5e0941393" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.683s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.140738] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "04b9ed30-2cd0-4c07-9141-76f0f53fefb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.140738] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "04b9ed30-2cd0-4c07-9141-76f0f53fefb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.421648] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 86422990-4215-4628-a7a7-4fdc910e304e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.534722] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 784.925427] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ef92e4ba-4ef3-4e26-9577-bad0c046ed47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.057622] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.430052] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 4cb117e3-ff57-4e7f-bb2b-a12c988e362c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.935694] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 09688e22-9225-4619-a9aa-eddb332cb8ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 786.440228] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b1fc7438-2078-435a-9754-19a8a1bc6f5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 786.943823] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 4deb28e7-351b-41b7-90bb-afdde200f7fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.447022] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.950221] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c5f2cc73-6bcd-4422-890b-3299d4cf4534 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.454584] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 1e355e38-60c6-4e7f-beb4-160c4527ec51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.454888] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 788.455158] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 788.677633] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f682052-e3ff-4ef9-8c1a-8d1d5d44c5a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.685723] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae49f9d-7dc7-4d85-a982-db957b2af366 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.716623] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf40e46-2978-463e-b6f0-65211848a5e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.724849] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116b9e78-7524-4929-b947-9e67736388b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.737810] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.240902] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 789.747404] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 789.747714] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.380s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.747878] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.307s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.750699] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.750867] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Cleaning up deleted instances {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11524}} [ 790.257085] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] There are 2 instances to clean {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11533}} [ 790.257085] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c4a88e75-690f-4bed-a4f9-a0de3b193eff] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 790.460077] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8588e48-14b7-4f35-8ac6-c396380f936f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.467177] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc20dbe-0ccd-4d9a-b24e-359445c37dd9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.495658] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf04fff-e626-4713-bb36-6246012ea587 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.502206] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e05ab3-aeb9-485b-b156-375d3d47af34 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.514500] env[62503]: DEBUG nova.compute.provider_tree [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.761611] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 90e42997-a34c-4a39-8d2f-7ab0ed19f028] Instance has had 0 of 5 
cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 791.017220] env[62503]: DEBUG nova.scheduler.client.report [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 791.263278] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.263421] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Cleaning up deleted instances with incomplete migration {{(pid=62503) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11562}} [ 791.527919] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.780s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.528618] env[62503]: ERROR nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. 
[ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Traceback (most recent call last): [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.driver.spawn(context, instance, image_meta, [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] vm_ref = self.build_virtual_machine(instance, [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.528618] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] for vif in network_info: [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return self._sync_wrapper(fn, *args, **kwargs) [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.wait() [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self[:] = self._gt.wait() [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return self._exit_event.wait() [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] current.throw(*self._exc) [ 791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
791.528984] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] result = function(*args, **kwargs) [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] return func(*args, **kwargs) [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise e [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] nwinfo = self.network_api.allocate_for_instance( [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] created_port_ids = self._update_ports_for_instance( [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] with excutils.save_and_reraise_exception(): [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] self.force_reraise() [ 791.529411] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise self.value [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] updated_port = self._update_port( [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] _ensure_no_port_binding_failure(port) [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] raise exception.PortBindingFailed(port_id=port['id']) [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] nova.exception.PortBindingFailed: Binding failed for 
port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. [ 791.529781] env[62503]: ERROR nova.compute.manager [instance: ff56659a-18f8-44c5-ab10-872e636a9357] [ 791.529781] env[62503]: DEBUG nova.compute.utils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 791.531667] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.499s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.536414] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Build of instance ff56659a-18f8-44c5-ab10-872e636a9357 was re-scheduled: Binding failed for port c1062fca-9848-40d4-bc22-18b2ce47b6c7, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 791.537246] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 791.537597] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquiring lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.537877] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Acquired lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.538166] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.766619] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.057523] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 
tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.145177] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.264582] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fd18b6-9cd3-45f6-9810-2f8fa8b8011f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.272279] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a639ceb-5a4d-4dbe-80c2-ee1f3519c41f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.301758] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11515bb-06d0-497d-95e1-4ee3ad2548a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.308892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b0223d-4f2a-41f3-962d-e0fcb621b801 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.322104] env[62503]: DEBUG nova.compute.provider_tree [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.648235] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Releasing lock "refresh_cache-ff56659a-18f8-44c5-ab10-872e636a9357" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.648625] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 792.648850] env[62503]: DEBUG nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 792.649041] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.662268] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.825842] env[62503]: DEBUG nova.scheduler.client.report [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 793.164917] env[62503]: DEBUG nova.network.neutron [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.331272] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.799s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.331913] env[62503]: ERROR nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. 
[ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Traceback (most recent call last): [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.driver.spawn(context, instance, image_meta, [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] vm_ref = self.build_virtual_machine(instance, [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.331913] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] for vif in network_info: [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return self._sync_wrapper(fn, *args, **kwargs) [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.wait() [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self[:] = self._gt.wait() [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return self._exit_event.wait() [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] current.throw(*self._exc) [ 793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
793.332284] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] result = function(*args, **kwargs) [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] return func(*args, **kwargs) [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise e [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] nwinfo = self.network_api.allocate_for_instance( [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] created_port_ids = self._update_ports_for_instance( [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] with excutils.save_and_reraise_exception(): [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] self.force_reraise() [ 793.332859] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise self.value [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] updated_port = self._update_port( [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] _ensure_no_port_binding_failure(port) [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] raise exception.PortBindingFailed(port_id=port['id']) [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] nova.exception.PortBindingFailed: Binding failed for 
port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. [ 793.333466] env[62503]: ERROR nova.compute.manager [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] [ 793.333466] env[62503]: DEBUG nova.compute.utils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 793.334361] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Build of instance 529e6f8e-49b9-46a7-a09f-17238522f7bc was re-scheduled: Binding failed for port 6db65cc5-b279-427b-812c-0c5fc0661b67, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 793.334752] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 793.334978] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquiring lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.335135] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Acquired lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.335292] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.336255] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.962s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.336454] env[62503]: DEBUG nova.objects.instance [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context 
/opt/stack/nova/nova/objects/instance.py:1067}} [ 794.120691] env[62503]: INFO nova.compute.manager [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] [instance: ff56659a-18f8-44c5-ab10-872e636a9357] Took 1.47 seconds to deallocate network for instance. [ 794.141950] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.187966] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.631958] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0b764e2d-6aca-40d5-ae09-46fe33425281 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.296s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.633083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.178s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.634428] env[62503]: INFO nova.compute.claims [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.690695] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Releasing lock "refresh_cache-529e6f8e-49b9-46a7-a09f-17238522f7bc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.690879] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 794.691059] env[62503]: DEBUG nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 794.691227] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.707366] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.204628] env[62503]: INFO nova.scheduler.client.report [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Deleted allocations for instance ff56659a-18f8-44c5-ab10-872e636a9357 [ 795.209986] env[62503]: DEBUG nova.network.neutron [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.674720] env[62503]: DEBUG oslo_concurrency.lockutils [None req-be269e0c-079d-42ba-b169-f1c6935976f8 tempest-ServerMetadataTestJSON-2107799143 tempest-ServerMetadataTestJSON-2107799143-project-member] Lock "ff56659a-18f8-44c5-ab10-872e636a9357" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.810s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.713906] env[62503]: INFO nova.compute.manager [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] [instance: 529e6f8e-49b9-46a7-a09f-17238522f7bc] Took 1.02 seconds to deallocate network for instance. 
[ 795.990911] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c80abf8-d424-4761-bbdb-a8d3b6aa5921 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.998239] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1076e3d7-00f9-431a-b840-5e742d333cec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.027144] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc3d85c-1b05-49aa-9f3b-a858b5691dfe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.034017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd7b47a-b426-4025-97c0-3c7c4e1992e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.046436] env[62503]: DEBUG nova.compute.provider_tree [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.177028] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 796.549880] env[62503]: DEBUG nova.scheduler.client.report [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 796.695277] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.751026] env[62503]: INFO nova.scheduler.client.report [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Deleted allocations for instance 529e6f8e-49b9-46a7-a09f-17238522f7bc [ 797.057583] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.058127] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 797.061266] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.680s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.061390] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.063299] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.051s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.100963] env[62503]: INFO nova.scheduler.client.report [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleted allocations for instance d4990c66-63d5-43b0-8187-2074c99ccde2 [ 797.262035] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc106d5b-533d-42f2-9e23-d15b663f98ba tempest-ServerDiagnosticsNegativeTest-1726197908 tempest-ServerDiagnosticsNegativeTest-1726197908-project-member] Lock "529e6f8e-49b9-46a7-a09f-17238522f7bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.697s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.567904] env[62503]: DEBUG nova.compute.utils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 797.574947] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 797.575235] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 797.609737] env[62503]: DEBUG oslo_concurrency.lockutils [None req-591ac4de-8a33-46d5-88d4-1e4a4b4bb606 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "d4990c66-63d5-43b0-8187-2074c99ccde2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.900s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.638676] env[62503]: DEBUG nova.policy [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd50229f835b5492e9ade7d1deb12cf1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eabfbceb13fa4254a63e5c69490241a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 797.764643] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 797.778512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.778890] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.778979] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.779221] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.779401] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.781491] env[62503]: INFO nova.compute.manager [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Terminating instance [ 797.783167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "refresh_cache-141d7d04-0267-4e15-90ed-112ac8fb8c9b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.783333] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquired lock "refresh_cache-141d7d04-0267-4e15-90ed-112ac8fb8c9b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.783477] env[62503]: DEBUG nova.network.neutron [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 
141d7d04-0267-4e15-90ed-112ac8fb8c9b] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.837110] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8167104a-d115-4b16-b0da-5473f139f819 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.845312] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf70d14-e4af-45ce-aeb7-e0cb39c10ff4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.876490] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747cb67c-8531-4bd3-8f00-096fa4e3bc08 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.883962] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28718d05-6987-420c-810e-e710b9b0a7fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.897271] env[62503]: DEBUG nova.compute.provider_tree [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.001269] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Successfully created port: 64751884-9fbd-4102-a5b7-cdbad8d77b85 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.075534] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 798.284872] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.309405] env[62503]: DEBUG nova.network.neutron [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.395675] env[62503]: DEBUG nova.network.neutron [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.401290] env[62503]: DEBUG nova.scheduler.client.report [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 798.898094] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Releasing lock "refresh_cache-141d7d04-0267-4e15-90ed-112ac8fb8c9b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.898546] env[62503]: DEBUG nova.compute.manager [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 798.898740] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 798.899683] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ed838c-b8b2-42a7-81c8-0bca8816c6e7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.907720] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 798.907720] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4def2a2d-5363-493c-acff-18bd791905e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.911528] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.912184] env[62503]: ERROR nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. 
[ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Traceback (most recent call last): [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.driver.spawn(context, instance, image_meta, [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] vm_ref = self.build_virtual_machine(instance, [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 798.912184] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] for vif in network_info: [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return self._sync_wrapper(fn, *args, **kwargs) [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.wait() [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self[:] = self._gt.wait() [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return self._exit_event.wait() [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] current.throw(*self._exc) [ 798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
798.912473] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] result = function(*args, **kwargs) [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] return func(*args, **kwargs) [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise e [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] nwinfo = self.network_api.allocate_for_instance( [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] created_port_ids = self._update_ports_for_instance( [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] with excutils.save_and_reraise_exception(): [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] self.force_reraise() [ 798.912755] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise self.value [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] updated_port = self._update_port( [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] _ensure_no_port_binding_failure(port) [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] raise exception.PortBindingFailed(port_id=port['id']) [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] nova.exception.PortBindingFailed: Binding failed for 
port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. [ 798.913161] env[62503]: ERROR nova.compute.manager [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] [ 798.913161] env[62503]: DEBUG nova.compute.utils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 798.916273] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.235s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.917706] env[62503]: INFO nova.compute.claims [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.921187] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 798.921187] env[62503]: value = "task-1387753" [ 798.921187] env[62503]: _type = "Task" [ 798.921187] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.922052] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Build of instance ad7badc9-cb11-4532-885a-28fb3d4de9ef was re-scheduled: Binding failed for port de38585f-ec61-4735-b9c7-0dcb8ebfaecc, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 798.922532] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 798.922823] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.923047] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.923273] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.934098] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.089511] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 799.119647] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.120243] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.120853] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.121143] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.121371] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.122160] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.122160] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.122160] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.122160] env[62503]: DEBUG nova.virt.hardware [None 
req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.122415] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.122622] env[62503]: DEBUG nova.virt.hardware [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.123553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0be3a9-05ab-4b97-a148-eb0cf0597825 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.131726] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc72d5aa-e484-4854-ab89-02f3ed285d5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.253160] env[62503]: DEBUG nova.compute.manager [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Received event network-changed-64751884-9fbd-4102-a5b7-cdbad8d77b85 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 799.253375] env[62503]: DEBUG nova.compute.manager [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Refreshing instance network info cache due to event network-changed-64751884-9fbd-4102-a5b7-cdbad8d77b85. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 799.253589] env[62503]: DEBUG oslo_concurrency.lockutils [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] Acquiring lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.253730] env[62503]: DEBUG oslo_concurrency.lockutils [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] Acquired lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.253888] env[62503]: DEBUG nova.network.neutron [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Refreshing network info cache for port 64751884-9fbd-4102-a5b7-cdbad8d77b85 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 799.442709] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387753, 'name': PowerOffVM_Task, 'duration_secs': 0.118412} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.443805] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.443980] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.444237] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64358313-0e7c-4b5a-a385-69e60a111f8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.460907] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.468043] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 799.468260] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 799.468439] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleting the datastore file [datastore1] 141d7d04-0267-4e15-90ed-112ac8fb8c9b {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.468673] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4409fbfe-80ca-419d-b398-05b4ce0d2d53 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.474306] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for the task: (returnval){ [ 799.474306] env[62503]: value = "task-1387755" [ 799.474306] env[62503]: _type = "Task" [ 799.474306] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.484563] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387755, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.623450] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.739717] env[62503]: ERROR nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. [ 799.739717] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 799.739717] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.739717] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.739717] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.739717] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.739717] env[62503]: ERROR nova.compute.manager raise self.value [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.739717] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 799.739717] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.739717] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 799.740108] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.740108] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 799.740108] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. 
[ 799.740108] env[62503]: ERROR nova.compute.manager [ 799.740108] env[62503]: Traceback (most recent call last): [ 799.740108] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 799.740108] env[62503]: listener.cb(fileno) [ 799.740108] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.740108] env[62503]: result = function(*args, **kwargs) [ 799.740108] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 799.740108] env[62503]: return func(*args, **kwargs) [ 799.740108] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 799.740108] env[62503]: raise e [ 799.740108] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 799.740108] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 799.740108] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.740108] env[62503]: created_port_ids = self._update_ports_for_instance( [ 799.740108] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.740108] env[62503]: with excutils.save_and_reraise_exception(): [ 799.740108] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.740108] env[62503]: self.force_reraise() [ 799.740108] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.740108] env[62503]: raise self.value [ 799.740108] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.740108] env[62503]: updated_port = self._update_port( [ 799.740108] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.740108] env[62503]: _ensure_no_port_binding_failure(port) [ 799.740108] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.740108] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 799.740775] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. [ 799.740775] env[62503]: Removing descriptor: 14 [ 799.740775] env[62503]: ERROR nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. 
[ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Traceback (most recent call last): [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] yield resources [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.driver.spawn(context, instance, image_meta, [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 799.740775] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] vm_ref = self.build_virtual_machine(instance, [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] vif_infos = vmwarevif.get_vif_info(self._session, [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] for vif in network_info: [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self._sync_wrapper(fn, *args, **kwargs) [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.wait() [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self[:] = self._gt.wait() [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self._exit_event.wait() [ 799.741563] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 799.741819] env[62503]: ERROR 
nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] result = hub.switch() [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self.greenlet.switch() [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] result = function(*args, **kwargs) [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return func(*args, **kwargs) [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise e [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] nwinfo = self.network_api.allocate_for_instance( [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.741819] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] created_port_ids = self._update_ports_for_instance( [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] with excutils.save_and_reraise_exception(): [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.force_reraise() [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise self.value [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] updated_port = self._update_port( [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.742086] 
env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] _ensure_no_port_binding_failure(port) [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.742086] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise exception.PortBindingFailed(port_id=port['id']) [ 799.742369] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. [ 799.742369] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] [ 799.742369] env[62503]: INFO nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Terminating instance [ 799.746504] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.781282] env[62503]: DEBUG nova.network.neutron [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.915647] env[62503]: DEBUG nova.network.neutron [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.987940] env[62503]: DEBUG oslo_vmware.api [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Task: {'id': task-1387755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138207} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.990451] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 799.990642] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 799.990817] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 799.990986] env[62503]: INFO nova.compute.manager [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Took 1.09 seconds to destroy the instance on the hypervisor. [ 799.991247] env[62503]: DEBUG oslo.service.loopingcall [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.991656] env[62503]: DEBUG nova.compute.manager [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 799.991761] env[62503]: DEBUG nova.network.neutron [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.010437] env[62503]: DEBUG nova.network.neutron [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.127277] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-ad7badc9-cb11-4532-885a-28fb3d4de9ef" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.127605] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 800.127833] env[62503]: DEBUG nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 800.128044] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.144192] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.180744] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94788ea-7dec-4a8e-a1de-42baf7173a99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.190076] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1108637-d7bb-45a9-bd54-7de125b16575 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.220825] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da676e-276d-49ce-9b43-6155396beaf0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.228456] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a49e23-ff2e-44f2-a0c4-96f63444ab99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.242849] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.418850] env[62503]: DEBUG oslo_concurrency.lockutils [req-128b3f93-4029-43e8-b696-04bff9458456 req-0fb841a6-9b4c-432c-b5c4-41c0e4a1df4e service nova] Releasing lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.419436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquired lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" 
{{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.419657] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.515703] env[62503]: DEBUG nova.network.neutron [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.646374] env[62503]: DEBUG nova.network.neutron [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.762915] env[62503]: ERROR nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [req-05d07713-d1a6-44e0-b026-7395082158e6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-05d07713-d1a6-44e0-b026-7395082158e6"}]} [ 800.779843] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 800.796922] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 800.797620] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.810369] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 800.830236] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 800.945757] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.021130] env[62503]: INFO nova.compute.manager [-] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Took 1.03 seconds to deallocate network for instance. [ 801.063997] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.091197] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fb668d-764d-406e-99e2-3c7bf0440dca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.098806] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c6e995-a58b-4384-afa7-b519cbcca67c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.128589] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5d49db-8176-44b6-8310-81dd02d59b0a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.135960] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc843d9-77c0-480a-ba17-e4ee64c51332 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.150185] env[62503]: INFO nova.compute.manager [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: ad7badc9-cb11-4532-885a-28fb3d4de9ef] Took 1.02 seconds to deallocate network for instance. 
[ 801.152881] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.327425] env[62503]: DEBUG nova.compute.manager [req-974c9dae-4816-4842-9ca9-747e67b2a484 req-cb4ac4d7-2203-4183-be61-ac788e94a014 service nova] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Received event network-vif-deleted-64751884-9fbd-4102-a5b7-cdbad8d77b85 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 801.528144] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.566058] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Releasing lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.566587] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 801.566780] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.567113] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91ff5d69-0bc3-400d-b846-ca4d121df77d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.576239] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f89e3f-d55c-425a-b3cf-54f7888a0487 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.600041] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9eee91c6-a949-453b-8ccd-ba986251ed27 could not be found. 
[ 801.600141] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.600343] env[62503]: INFO nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Took 0.03 seconds to destroy the instance on the hypervisor. [ 801.600592] env[62503]: DEBUG oslo.service.loopingcall [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.600812] env[62503]: DEBUG nova.compute.manager [-] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 801.600905] env[62503]: DEBUG nova.network.neutron [-] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.617870] env[62503]: DEBUG nova.network.neutron [-] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.676918] env[62503]: ERROR nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [req-7dd3f035-1872-4564-8cfd-e2d23a58aaac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7dd3f035-1872-4564-8cfd-e2d23a58aaac"}]} [ 801.692468] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 801.706602] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 801.706848] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.718200] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 801.737117] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 801.982562] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1be5d-4c0e-4a90-8833-37a6360f877d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.989736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e128349d-6118-41d6-9d9d-92539416903e {{(pid=62503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.018992] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a049d6-99b2-4f2c-b7b8-d0f189312ae9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.026342] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b7c578-7a85-48cb-8cc7-915419da8c17 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.035583] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "c990f365-97df-4203-bd8c-dab822b2d8c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.035826] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "c990f365-97df-4203-bd8c-dab822b2d8c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.044323] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.121512] env[62503]: DEBUG nova.network.neutron [-] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.178059] env[62503]: INFO nova.scheduler.client.report [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted allocations for instance ad7badc9-cb11-4532-885a-28fb3d4de9ef [ 802.576158] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:972}} [ 802.576455] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 75 to 76 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 802.576651] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.624331] env[62503]: INFO nova.compute.manager [-] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Took 1.02 seconds to deallocate network for instance. [ 802.626259] env[62503]: DEBUG nova.compute.claims [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 802.626447] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.687957] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ecc5c12d-2a3e-430a-9c3c-15d45f1a4068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "ad7badc9-cb11-4532-885a-28fb3d4de9ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.725s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.084916] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.169s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.085485] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 803.088243] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.132s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.089634] env[62503]: INFO nova.compute.claims [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.190512] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 803.594512] env[62503]: DEBUG nova.compute.utils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.597803] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 803.597984] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.638210] env[62503]: DEBUG nova.policy [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa08ae07bbda406b81a0048e8b670bba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bd06d11646b4f27a14e2f252085eaa3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.710383] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.960141] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Successfully created port: ad998f5e-6b3a-49ee-ab61-31cfb715bf6a {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.098118] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 804.354775] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fe1d40-1a49-4568-8df7-dc6783394c02 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.364457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e893c8-12b6-45d4-b179-d2839094d80b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.397271] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b749c7-0fd8-4271-be65-a90eb10818b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.405272] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20b6ec2-e075-4258-8a05-0770d1884293 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.413397] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "9ccdc727-536e-4db8-bad4-960858254758" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.413670] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.422563] env[62503]: DEBUG nova.compute.provider_tree [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.702968] env[62503]: DEBUG nova.compute.manager [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Received event network-changed-ad998f5e-6b3a-49ee-ab61-31cfb715bf6a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 804.703507] env[62503]: DEBUG nova.compute.manager [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Refreshing instance network info cache due to event network-changed-ad998f5e-6b3a-49ee-ab61-31cfb715bf6a. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 804.706934] env[62503]: DEBUG oslo_concurrency.lockutils [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] Acquiring lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.706934] env[62503]: DEBUG oslo_concurrency.lockutils [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] Acquired lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.706934] env[62503]: DEBUG nova.network.neutron [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Refreshing network info cache for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.926180] env[62503]: DEBUG nova.scheduler.client.report [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 804.929945] env[62503]: ERROR nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. 
[ 804.929945] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 804.929945] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.929945] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.929945] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.929945] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.929945] env[62503]: ERROR nova.compute.manager raise self.value [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.929945] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.929945] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.929945] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.930362] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.930362] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.930362] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. 
[ 804.930362] env[62503]: ERROR nova.compute.manager [ 804.930362] env[62503]: Traceback (most recent call last): [ 804.930362] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.930362] env[62503]: listener.cb(fileno) [ 804.930362] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.930362] env[62503]: result = function(*args, **kwargs) [ 804.930362] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.930362] env[62503]: return func(*args, **kwargs) [ 804.930362] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 804.930362] env[62503]: raise e [ 804.930362] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 804.930362] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 804.930362] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.930362] env[62503]: created_port_ids = self._update_ports_for_instance( [ 804.930362] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.930362] env[62503]: with excutils.save_and_reraise_exception(): [ 804.930362] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.930362] env[62503]: self.force_reraise() [ 804.930362] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.930362] env[62503]: raise self.value [ 804.930362] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.930362] env[62503]: updated_port = self._update_port( [ 804.930362] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.930362] env[62503]: _ensure_no_port_binding_failure(port) [ 804.930362] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.930362] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.931011] env[62503]: nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. [ 804.931011] env[62503]: Removing descriptor: 14 [ 805.114863] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 805.142506] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.142748] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.142899] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.143091] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.143237] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.143381] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.143584] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.143739] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.143898] env[62503]: DEBUG nova.virt.hardware [None 
req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.144066] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.144253] env[62503]: DEBUG nova.virt.hardware [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.145766] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1c1ed5-b395-43eb-95e1-8a66e8e8fb05 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.153999] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a534b23d-1404-4eac-8486-373491e75954 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.167310] env[62503]: ERROR nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. 
[ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Traceback (most recent call last): [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] yield resources [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.driver.spawn(context, instance, image_meta, [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] vm_ref = self.build_virtual_machine(instance, [ 805.167310] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] vif_infos = vmwarevif.get_vif_info(self._session, [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] for vif in network_info: [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return self._sync_wrapper(fn, *args, **kwargs) [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.wait() [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self[:] = self._gt.wait() [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return self._exit_event.wait() [ 805.167602] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 805.167602] env[62503]: ERROR 
nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] current.throw(*self._exc) [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] result = function(*args, **kwargs) [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return func(*args, **kwargs) [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise e [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] nwinfo = self.network_api.allocate_for_instance( [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] created_port_ids = self._update_ports_for_instance( [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] with excutils.save_and_reraise_exception(): [ 805.167869] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.force_reraise() [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise self.value [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] updated_port = self._update_port( [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] _ensure_no_port_binding_failure(port) [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise exception.PortBindingFailed(port_id=port['id']) [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. [ 805.168156] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] [ 805.168156] env[62503]: INFO nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Terminating instance [ 805.170513] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquiring lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.226031] env[62503]: DEBUG nova.network.neutron [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.298766] env[62503]: DEBUG nova.network.neutron [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.432417] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.432937] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 805.435838] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.305s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.800928] env[62503]: DEBUG oslo_concurrency.lockutils [req-f43c6996-c62c-498b-8273-7bfacfe1de2e req-7d21a137-826d-4445-a11d-35fbee92301c service nova] Releasing lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.801361] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquired lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.801549] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.940252] env[62503]: DEBUG nova.compute.utils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.946023] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 805.946023] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 805.996925] env[62503]: DEBUG nova.policy [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fae87313d8e40bfad2bf380d802bb70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a2c654ee13047d6a75ffbd6f5d1c6a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 806.151231] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f677549-788f-4d58-9c85-89af96646b29 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.159176] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95653e2-7405-4706-9f1a-a5788c2afbc2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.191186] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e811b82c-1e2d-471e-acac-a27651d244ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.199014] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b809f7-5e50-46bf-9648-bad6f2f29c27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.212497] env[62503]: DEBUG nova.compute.provider_tree [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.323148] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.326297] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Successfully created port: 0b345b9f-73a4-4907-b9be-78896c2c7c3b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.399446] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.445176] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 806.715829] env[62503]: DEBUG nova.scheduler.client.report [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 806.729568] env[62503]: DEBUG nova.compute.manager [req-806cabbd-4476-4b22-9c52-d1641c0af4c1 req-289e6c0e-5c2f-4e17-9b0a-a63d6cc76ea8 service nova] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Received event network-vif-deleted-ad998f5e-6b3a-49ee-ab61-31cfb715bf6a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 806.903049] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Releasing lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.903049] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 806.903173] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.903456] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fba7db4-5518-43e7-a45a-7119850322c1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.915991] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53f6d04-50c4-4b40-a3d0-e42233c97988 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.940308] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f6f17748-815c-417f-bce6-3bc97f23b637 could not be found. [ 806.940610] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.940812] env[62503]: INFO nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Took 0.04 seconds to destroy the instance on the hypervisor. [ 806.941077] env[62503]: DEBUG oslo.service.loopingcall [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.941312] env[62503]: DEBUG nova.compute.manager [-] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 806.942243] env[62503]: DEBUG nova.network.neutron [-] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.956626] env[62503]: DEBUG nova.network.neutron [-] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.204912] env[62503]: ERROR nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. 
[ 807.204912] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 807.204912] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.204912] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.204912] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.204912] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.204912] env[62503]: ERROR nova.compute.manager raise self.value [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.204912] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 807.204912] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.204912] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 807.205468] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.205468] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 807.205468] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. 
[ 807.205468] env[62503]: ERROR nova.compute.manager [ 807.205468] env[62503]: Traceback (most recent call last): [ 807.205468] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 807.205468] env[62503]: listener.cb(fileno) [ 807.205468] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.205468] env[62503]: result = function(*args, **kwargs) [ 807.205468] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.205468] env[62503]: return func(*args, **kwargs) [ 807.205468] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 807.205468] env[62503]: raise e [ 807.205468] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 807.205468] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 807.205468] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.205468] env[62503]: created_port_ids = self._update_ports_for_instance( [ 807.205468] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.205468] env[62503]: with excutils.save_and_reraise_exception(): [ 807.205468] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.205468] env[62503]: self.force_reraise() [ 807.205468] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.205468] env[62503]: raise self.value [ 807.205468] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.205468] env[62503]: updated_port = self._update_port( [ 807.205468] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.205468] env[62503]: _ensure_no_port_binding_failure(port) [ 807.205468] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.205468] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 807.206062] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. [ 807.206062] env[62503]: Removing descriptor: 14 [ 807.221681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.786s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.222208] env[62503]: ERROR nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. 
[ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Traceback (most recent call last): [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.driver.spawn(context, instance, image_meta, [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] vm_ref = self.build_virtual_machine(instance, [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.222208] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] for vif in network_info: [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self._sync_wrapper(fn, *args, **kwargs) [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.wait() [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self[:] = self._gt.wait() [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self._exit_event.wait() [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] result = hub.switch() [ 807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
807.222998] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return self.greenlet.switch() [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] result = function(*args, **kwargs) [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] return func(*args, **kwargs) [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise e [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] nwinfo = self.network_api.allocate_for_instance( [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] created_port_ids = self._update_ports_for_instance( [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] with excutils.save_and_reraise_exception(): [ 807.223346] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] self.force_reraise() [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise self.value [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] updated_port = self._update_port( [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] _ensure_no_port_binding_failure(port) [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] raise exception.PortBindingFailed(port_id=port['id']) [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] nova.exception.PortBindingFailed: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. [ 807.223621] env[62503]: ERROR nova.compute.manager [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] [ 807.223940] env[62503]: DEBUG nova.compute.utils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 807.224245] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.608s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.227039] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Build of instance 32d4fda5-6d30-4416-b187-cf5548cb23bf was re-scheduled: Binding failed for port e7c06575-72b9-4935-9724-8bb24022dfa9, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 807.227456] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 807.227677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.227828] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.227988] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.453929] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 807.459454] env[62503]: DEBUG nova.network.neutron [-] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.479584] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.479824] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.479980] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.480183] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.480333] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.480483] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.480705] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 807.480848] 
env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.481026] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.481197] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.481380] env[62503]: DEBUG nova.virt.hardware [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.482461] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f40046b-9261-409d-9f01-c2b9da868350 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.489949] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9575752f-0d02-4fb6-a078-3ab33c538891 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.504464] env[62503]: ERROR nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. 
[ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Traceback (most recent call last): [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] yield resources [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.driver.spawn(context, instance, image_meta, [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] vm_ref = self.build_virtual_machine(instance, [ 807.504464] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] for vif in network_info: [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return self._sync_wrapper(fn, *args, **kwargs) [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.wait() [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self[:] = self._gt.wait() [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return self._exit_event.wait() [ 807.504796] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 807.504796] env[62503]: ERROR 
nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] current.throw(*self._exc) [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] result = function(*args, **kwargs) [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return func(*args, **kwargs) [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise e [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] nwinfo = self.network_api.allocate_for_instance( [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] created_port_ids = self._update_ports_for_instance( [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] with excutils.save_and_reraise_exception(): [ 807.505147] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.force_reraise() [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise self.value [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] updated_port = self._update_port( [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] _ensure_no_port_binding_failure(port) [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise exception.PortBindingFailed(port_id=port['id']) [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. [ 807.505524] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] [ 807.505524] env[62503]: INFO nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Terminating instance [ 807.506673] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquiring lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.506830] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquired lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.506989] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.750654] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.849553] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.936881] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1424b0-85d4-41d6-b8c8-d7d4b3bf8742 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.944353] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53525c1-3a87-434d-9ae0-0bdfab2c1f70 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.973918] env[62503]: INFO nova.compute.manager [-] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Took 1.03 seconds to deallocate network for instance. 
[ 807.976366] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5e0fc3-008f-437d-9a73-fdc92127e3b2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.978934] env[62503]: DEBUG nova.compute.claims [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.979130] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.984072] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31c8e00-8ee7-477a-87b9-f20dabc1480d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.996693] env[62503]: DEBUG nova.compute.provider_tree [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.024271] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.116930] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.352258] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Releasing lock "refresh_cache-32d4fda5-6d30-4416-b187-cf5548cb23bf" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.352508] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 808.352730] env[62503]: DEBUG nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 808.352910] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.367591] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.499469] env[62503]: DEBUG nova.scheduler.client.report [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 808.619651] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Releasing lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.620073] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 808.620302] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.620587] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64f89d70-dfad-4834-8751-0ddfca9185fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.629947] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df295d9-2bbc-4582-8739-17042509e209 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.650760] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1251e59f-9c01-4115-8400-40aacedd97e2 could not be found. [ 808.650981] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.651181] env[62503]: INFO nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 808.651433] env[62503]: DEBUG oslo.service.loopingcall [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.651644] env[62503]: DEBUG nova.compute.manager [-] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 808.651739] env[62503]: DEBUG nova.network.neutron [-] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.664874] env[62503]: DEBUG nova.network.neutron [-] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.753544] env[62503]: DEBUG nova.compute.manager [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Received event network-changed-0b345b9f-73a4-4907-b9be-78896c2c7c3b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 808.753767] env[62503]: DEBUG nova.compute.manager [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Refreshing instance network info cache due to event network-changed-0b345b9f-73a4-4907-b9be-78896c2c7c3b. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 808.753986] env[62503]: DEBUG oslo_concurrency.lockutils [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] Acquiring lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.754143] env[62503]: DEBUG oslo_concurrency.lockutils [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] Acquired lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.754300] env[62503]: DEBUG nova.network.neutron [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Refreshing network info cache for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.870094] env[62503]: DEBUG nova.network.neutron [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.004506] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.780s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.004831] env[62503]: ERROR nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. 
[ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Traceback (most recent call last): [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.driver.spawn(context, instance, image_meta, [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] vm_ref = self.build_virtual_machine(instance, [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 809.004831] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] for vif in network_info: [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return self._sync_wrapper(fn, *args, **kwargs) [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.wait() [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self[:] = self._gt.wait() [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return self._exit_event.wait() [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] current.throw(*self._exc) [ 809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
809.005130] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] result = function(*args, **kwargs) [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] return func(*args, **kwargs) [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise e [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] nwinfo = self.network_api.allocate_for_instance( [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] created_port_ids = self._update_ports_for_instance( [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] with excutils.save_and_reraise_exception(): [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] self.force_reraise() [ 809.005407] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise self.value [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] updated_port = self._update_port( [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] _ensure_no_port_binding_failure(port) [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] raise exception.PortBindingFailed(port_id=port['id']) [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] nova.exception.PortBindingFailed: Binding failed for 
port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. [ 809.005686] env[62503]: ERROR nova.compute.manager [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] [ 809.005686] env[62503]: DEBUG nova.compute.utils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 809.007227] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Build of instance a4b600d2-b411-4957-92cb-7e8e462fde1d was re-scheduled: Binding failed for port 884f6499-41ed-4dce-9197-75e5aaf3e2ce, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 809.007635] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 809.007864] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.008021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.008182] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.009165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.789s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.011128] env[62503]: INFO nova.compute.claims [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.167466] env[62503]: DEBUG nova.network.neutron [-] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Updating instance_info_cache with network_info: [] 
{{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.274344] env[62503]: DEBUG nova.network.neutron [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.354769] env[62503]: DEBUG nova.network.neutron [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.373033] env[62503]: INFO nova.compute.manager [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 32d4fda5-6d30-4416-b187-cf5548cb23bf] Took 1.02 seconds to deallocate network for instance. [ 809.530671] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.596175] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.670085] env[62503]: INFO nova.compute.manager [-] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Took 1.02 seconds to deallocate network for instance. 
[ 809.673812] env[62503]: DEBUG nova.compute.claims [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 809.673993] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.857941] env[62503]: DEBUG oslo_concurrency.lockutils [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] Releasing lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.858408] env[62503]: DEBUG nova.compute.manager [req-63a54050-7be0-4b45-8cfe-e2544d8720c9 req-6938ed16-b6c8-491a-87ac-fbe29d8e54d6 service nova] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Received event network-vif-deleted-0b345b9f-73a4-4907-b9be-78896c2c7c3b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 810.098504] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "refresh_cache-a4b600d2-b411-4957-92cb-7e8e462fde1d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.098713] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 810.098894] env[62503]: DEBUG nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 810.099079] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.113853] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.214105] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f4d273-05d7-4fed-85db-129ee4d9e233 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.221573] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b653587e-571e-4236-ba51-84ecf2956ae2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.250192] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2d8141-fdfe-4e2b-be08-629040808474 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.257341] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b998a33-3688-44f4-9f41-a9ca489c2af5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.270090] env[62503]: DEBUG nova.compute.provider_tree [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.401507] env[62503]: INFO nova.scheduler.client.report [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Deleted allocations for instance 32d4fda5-6d30-4416-b187-cf5548cb23bf [ 810.616505] env[62503]: DEBUG nova.network.neutron [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.772908] env[62503]: DEBUG nova.scheduler.client.report [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 810.910089] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3dbf8d33-063c-4c40-92c7-9bf6b35e4526 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "32d4fda5-6d30-4416-b187-cf5548cb23bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.192s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.120775] env[62503]: INFO nova.compute.manager [None req-bed06986-6337-47b4-a3da-32c888b74905 
tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: a4b600d2-b411-4957-92cb-7e8e462fde1d] Took 1.02 seconds to deallocate network for instance. [ 811.278050] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.278432] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 811.281111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.224s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.282826] env[62503]: INFO nova.compute.claims [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.413333] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 811.787271] env[62503]: DEBUG nova.compute.utils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 811.791207] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Not allocating networking since 'none' was specified. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 811.936565] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.147592] env[62503]: INFO nova.scheduler.client.report [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Deleted allocations for instance a4b600d2-b411-4957-92cb-7e8e462fde1d [ 812.293643] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 812.547738] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e457eb6-a09e-42de-b9f3-f062de55a236 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.555646] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4388c26-58f2-4b83-838a-0ae3ca98a81a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.585704] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477d374c-8e45-4a4b-b790-04c8c5fbd75d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.593616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b291bb-13cb-47b4-8112-57e48c32a859 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.606543] env[62503]: DEBUG nova.compute.provider_tree [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.657026] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bed06986-6337-47b4-a3da-32c888b74905 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "a4b600d2-b411-4957-92cb-7e8e462fde1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.213s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.110275] env[62503]: DEBUG nova.scheduler.client.report [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 813.158714] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 813.307985] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 813.329210] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 813.329529] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 813.329699] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.329889] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 813.330053] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.330207] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 813.330441] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 813.330616] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 813.330785] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 813.330948] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 813.331136] env[62503]: DEBUG nova.virt.hardware [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 813.331988] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd35870b-cf6d-4a42-8556-60a3d338acac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.340164] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14664ab-bac0-43ff-8709-bf69ee554b84 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.355054] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.361235] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Creating folder: Project (4c83962234424e959cbdf17caf6664c6). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.361975] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf494b90-c32f-4eb2-8284-b5d9c9a446d2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.374468] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Created folder: Project (4c83962234424e959cbdf17caf6664c6) in parent group-v294540. [ 813.374820] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Creating folder: Instances. Parent ref: group-v294562. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.375102] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0214cb68-54e6-4249-9340-a1cc188c94c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.383897] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Created folder: Instances in parent group-v294562. [ 813.384141] env[62503]: DEBUG oslo.service.loopingcall [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.384329] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.384522] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f70eaf96-d55a-418c-8220-67a4627e4d9a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.401092] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.401092] env[62503]: value = "task-1387758" [ 813.401092] env[62503]: _type = "Task" [ 813.401092] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.408198] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387758, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.615825] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.615825] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 813.618667] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.923s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.621751] env[62503]: INFO nova.compute.claims [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.689020] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.913287] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387758, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.125414] env[62503]: DEBUG nova.compute.utils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.126889] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Not allocating networking since 'none' was specified. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 814.245217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "6229dda6-90e8-457b-beb3-2107e3700b29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.245462] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.414640] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387758, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.630613] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 814.838291] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304652a9-69e4-4d9c-ba8d-86aa316bb854 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.845925] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08162323-38db-45d7-a783-eae4a0068724 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.887914] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d17076b-e403-40aa-a791-14c248c76f20 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.898987] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7072e5c-f30b-4000-a2d8-59518c9479ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.921865] env[62503]: DEBUG nova.compute.provider_tree [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.930071] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387758, 'name': CreateVM_Task, 'duration_secs': 1.266077} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.931924] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.931924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.931924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.931924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 814.931924] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e7bdbc7-cb24-4762-b20a-21f37b13eefe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.940019] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 814.940019] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247733f-f588-28dd-0e4e-a1f1c0780b3c" [ 814.940019] env[62503]: _type = "Task" [ 814.940019] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.944866] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247733f-f588-28dd-0e4e-a1f1c0780b3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.431714] env[62503]: DEBUG nova.scheduler.client.report [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 815.446945] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247733f-f588-28dd-0e4e-a1f1c0780b3c, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.447273] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.447532] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.447772] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.447920] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.448173] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.448911] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49f1cbed-7dc9-44dc-9bc6-1341fdde73ee {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.456918] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.457475] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.458072] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aac8a8bb-64ab-4e97-875f-597bf8315bf2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.463242] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 815.463242] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5299ce59-df79-b4aa-1a47-22d19917fd28" [ 815.463242] env[62503]: _type = "Task" [ 815.463242] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.470501] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5299ce59-df79-b4aa-1a47-22d19917fd28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.507796] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "cf611345-d276-4745-a2f8-0551c9dca2c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.508036] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.642907] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 815.673124] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 815.673380] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 815.673600] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.673800] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 815.673946] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.674104] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 815.674314] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 815.674472] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 815.674636] env[62503]: DEBUG nova.virt.hardware [None 
req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 815.674796] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 815.674964] env[62503]: DEBUG nova.virt.hardware [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 815.675828] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2e4588-df32-4964-9ef9-d6d6be6416fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.683578] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0941307e-227c-4175-bb8a-6961e0216f09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.698254] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.705323] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Creating folder: Project (cce4a77249a54228812c75aeb53e304b). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.705617] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d4c3f19-2560-4c1e-9348-1929d90c9656 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.716364] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Created folder: Project (cce4a77249a54228812c75aeb53e304b) in parent group-v294540. [ 815.716598] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Creating folder: Instances. Parent ref: group-v294565. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.716852] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9789ddb0-3dbb-45f4-89ff-2deb60280eab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.725395] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Created folder: Instances in parent group-v294565. [ 815.725657] env[62503]: DEBUG oslo.service.loopingcall [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.725940] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.726176] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90d4a3ea-ebce-441d-8f83-94e39d197fac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.742300] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.742300] env[62503]: value = "task-1387761" [ 815.742300] env[62503]: _type = "Task" [ 815.742300] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.749351] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387761, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.936238] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.936778] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 815.939569] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.655s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.941179] env[62503]: INFO nova.compute.claims [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.977735] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5299ce59-df79-b4aa-1a47-22d19917fd28, 'name': SearchDatastore_Task, 'duration_secs': 0.008055} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.979898] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f5fb22-8a58-4dd0-864d-5d42904eeb0c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.987436] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 815.987436] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e6179-008d-842a-9a52-7600db8e83f6" [ 815.987436] env[62503]: _type = "Task" [ 815.987436] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.998738] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e6179-008d-842a-9a52-7600db8e83f6, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.998992] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.999251] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.999529] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ede86360-3d16-4a87-b139-63bcc7a30afb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.007451] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 816.007451] env[62503]: value = "task-1387762" [ 816.007451] env[62503]: _type = "Task" [ 816.007451] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.016161] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.253016] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387761, 'name': CreateVM_Task, 'duration_secs': 0.250529} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.253160] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 816.253591] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.253753] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.254092] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 816.254356] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fda71bce-18a7-430e-bed6-004daccc5f7b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.259553] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 816.259553] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528d3b70-3aaa-57d7-9e70-14a95ba974da" [ 816.259553] env[62503]: _type = "Task" [ 816.259553] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.267862] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528d3b70-3aaa-57d7-9e70-14a95ba974da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.447070] env[62503]: DEBUG nova.compute.utils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.451744] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 816.451992] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.493148] env[62503]: DEBUG nova.policy [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34e046530aae4accb799ea7504056166', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '046d0563ab4a4d90ae0dbd7871e31aff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 816.516923] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387762, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.749432] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Successfully created port: bf3af8e0-63c1-45af-8f69-da97b8b24149 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.769719] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528d3b70-3aaa-57d7-9e70-14a95ba974da, 'name': SearchDatastore_Task, 'duration_secs': 0.057776} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.770114] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.770382] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.770638] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.770791] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.770969] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.771235] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-759b91dc-ff82-4a4a-9c5b-275d78e05074 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.778360] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.778541] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.779221] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a650de-bbe4-45ef-9771-4acafae8d287 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.784134] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 816.784134] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ee34e-8ade-d18c-7acd-13dcecc9a1be" [ 816.784134] env[62503]: _type = "Task" [ 816.784134] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.791101] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ee34e-8ade-d18c-7acd-13dcecc9a1be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.952761] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 817.018273] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387762, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535256} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.020898] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 817.021136] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.021599] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff63433d-3e7f-4eec-a8ee-6c32f8910dbd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.027517] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 817.027517] env[62503]: value = "task-1387763" [ 817.027517] env[62503]: _type = "Task" [ 817.027517] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.037688] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.297013] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ee34e-8ade-d18c-7acd-13dcecc9a1be, 'name': SearchDatastore_Task, 'duration_secs': 0.007943} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.300198] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66cc71d3-a8e4-4f39-b3d3-42682d27c946 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.305361] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 817.305361] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ed82a-2458-2034-226f-bb878d2c1937" [ 817.305361] env[62503]: _type = "Task" [ 817.305361] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.313075] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ed82a-2458-2034-226f-bb878d2c1937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.344254] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3cf9f5-ace8-48eb-a3da-080b1c5abcf4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.351015] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bbcdcd-6d59-4fbe-980f-986c6703dcb6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.383873] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3753729c-8a5f-4030-ace4-147970c48665 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.391093] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7129f3eb-0b85-47ae-ae36-16a33c1314b4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.403789] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 817.536881] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062486} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.538537] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.539716] env[62503]: DEBUG nova.compute.manager [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Received event network-changed-bf3af8e0-63c1-45af-8f69-da97b8b24149 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 817.540115] env[62503]: DEBUG nova.compute.manager [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Refreshing instance network info cache due to event network-changed-bf3af8e0-63c1-45af-8f69-da97b8b24149. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 817.540379] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] Acquiring lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.540561] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] Acquired lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.540751] env[62503]: DEBUG nova.network.neutron [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Refreshing network info cache for port bf3af8e0-63c1-45af-8f69-da97b8b24149 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 817.543442] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b128b144-7e2e-4f8e-a0fa-693e8d0dcb19 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.565948] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.566660] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0df551cb-da4d-486a-8a0f-8d27ae92951a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.585841] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 817.585841] env[62503]: value = "task-1387764" [ 
817.585841] env[62503]: _type = "Task" [ 817.585841] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.593184] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387764, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.707180] env[62503]: ERROR nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. [ 817.707180] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 817.707180] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.707180] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.707180] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.707180] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.707180] env[62503]: ERROR nova.compute.manager raise self.value [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.707180] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 817.707180] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.707180] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 817.707725] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.707725] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 817.707725] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. 
[ 817.707725] env[62503]: ERROR nova.compute.manager [ 817.707725] env[62503]: Traceback (most recent call last): [ 817.707725] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 817.707725] env[62503]: listener.cb(fileno) [ 817.707725] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.707725] env[62503]: result = function(*args, **kwargs) [ 817.707725] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 817.707725] env[62503]: return func(*args, **kwargs) [ 817.707725] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 817.707725] env[62503]: raise e [ 817.707725] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 817.707725] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 817.707725] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.707725] env[62503]: created_port_ids = self._update_ports_for_instance( [ 817.707725] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.707725] env[62503]: with excutils.save_and_reraise_exception(): [ 817.707725] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.707725] env[62503]: self.force_reraise() [ 817.707725] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.707725] env[62503]: raise self.value [ 817.707725] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.707725] env[62503]: updated_port = self._update_port( [ 817.707725] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.707725] env[62503]: _ensure_no_port_binding_failure(port) [ 817.707725] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.707725] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 817.708437] env[62503]: nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. [ 817.708437] env[62503]: Removing descriptor: 14 [ 817.815267] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525ed82a-2458-2034-226f-bb878d2c1937, 'name': SearchDatastore_Task, 'duration_secs': 0.00912} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.815525] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.815826] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] ef92e4ba-4ef3-4e26-9577-bad0c046ed47/ef92e4ba-4ef3-4e26-9577-bad0c046ed47.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.816093] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cb366f6-c0ac-4095-bb81-7ad5ddfc52b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.823762] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 817.823762] env[62503]: value = "task-1387765" [ 817.823762] env[62503]: _type = "Task" [ 817.823762] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.831341] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387765, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.925328] env[62503]: ERROR nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [req-0766e5d3-0da2-48fd-aca7-cb001c4fb58e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0766e5d3-0da2-48fd-aca7-cb001c4fb58e"}]} [ 817.942036] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 817.957094] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 817.957354] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 817.964872] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 817.970631] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 817.989031] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 817.993628] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.993878] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.994041] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.994241] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.994382] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.994526] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 
tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.994733] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.994887] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.995087] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.995266] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.995443] env[62503]: DEBUG nova.virt.hardware [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.996481] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d18691-b9eb-4ead-910c-359061fb0b80 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.005169] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeacea4-3c06-41ee-bbd1-b6d29d355782 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.021979] env[62503]: ERROR nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. 
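The topology lines above walk from flavor and image limits (all effectively unbounded here) down to a single VirtCPUTopology(cores=1,sockets=1,threads=1) for the one-vCPU m1.nano flavor. A rough, self-contained illustration of that enumeration, not nova/virt/hardware.py itself:

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) splits of vcpus within the limits."""
    return [VirtCPUTopology(s, c, t)
            for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus]

# One vCPU (m1.nano) admits exactly one topology, matching
# "Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)".
print(possible_topologies(1))
```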
[ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Traceback (most recent call last): [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] yield resources [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.driver.spawn(context, instance, image_meta, [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] vm_ref = self.build_virtual_machine(instance, [ 818.021979] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] for vif in network_info: [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return self._sync_wrapper(fn, *args, **kwargs) [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.wait() [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self[:] = self._gt.wait() [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return self._exit_event.wait() [ 818.022349] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 818.022349] env[62503]: ERROR 
nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] current.throw(*self._exc) [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] result = function(*args, **kwargs) [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return func(*args, **kwargs) [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise e [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] nwinfo = self.network_api.allocate_for_instance( [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] created_port_ids = self._update_ports_for_instance( [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] with excutils.save_and_reraise_exception(): [ 818.022646] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.force_reraise() [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise self.value [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] updated_port = self._update_port( [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] _ensure_no_port_binding_failure(port) [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise exception.PortBindingFailed(port_id=port['id']) [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. [ 818.022941] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] [ 818.022941] env[62503]: INFO nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Terminating instance [ 818.026996] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquiring lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.066325] env[62503]: DEBUG nova.network.neutron [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.096567] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387764, 'name': ReconfigVM_Task, 'duration_secs': 0.257254} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.097256] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.097502] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb1b785f-20a8-4030-a935-aa897cb3e9b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.104456] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 818.104456] env[62503]: value = "task-1387766" [ 818.104456] env[62503]: _type = "Task" [ 818.104456] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.114414] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387766, 'name': Rename_Task} progress is 5%. 
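The traceback above bottoms out in _ensure_no_port_binding_failure: once Neutron reports the port's binding as failed, the build cannot proceed and the instance is terminated, as the INFO line shows. A minimal sketch of that check, assuming the port dict carries Neutron's binding:vif_type field; the exception class here is illustrative, not imported from Nova:

```python
class PortBindingFailed(Exception):
    """Illustrative stand-in for the exception raised in the traceback."""
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a port its mechanism drivers could not bind with
    # binding:vif_type = 'binding_failed'; spawning against such a port
    # cannot succeed, so the build is aborted instead.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])

try:
    ensure_no_port_binding_failure(
        {"id": "bf3af8e0-63c1-45af-8f69-da97b8b24149",
         "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(exc)   # same message as the ERROR lines above
```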
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.196502] env[62503]: DEBUG nova.network.neutron [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.270735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29f0f02-bc39-4f1c-9483-2718de5cc85e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.279171] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c12a87-1cde-4e20-98fb-a0aa3c300bc4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.309994] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e381440-3e9a-4a75-a683-62e4d5b65120 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.318116] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24fff85-7989-4eb1-a3fa-f8e827aad532 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.330100] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.339147] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387765, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465061} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.339445] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] ef92e4ba-4ef3-4e26-9577-bad0c046ed47/ef92e4ba-4ef3-4e26-9577-bad0c046ed47.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.339653] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.339899] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e250611-5116-4f7e-a9ce-5e7386df4952 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.346608] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 818.346608] env[62503]: value = "task-1387767" [ 818.346608] env[62503]: _type = "Task" [ 818.346608] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.354242] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.614209] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387766, 'name': Rename_Task, 'duration_secs': 0.165084} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.614343] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.614584] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65486eb9-524e-4dfd-bf89-0a0cd5033270 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.620938] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 818.620938] env[62503]: value = "task-1387768" [ 818.620938] env[62503]: _type = "Task" [ 818.620938] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.628425] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387768, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.700251] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f6c09c6-5a84-4b08-b105-65c8a20b1967 req-589c3a7f-9f93-4e5f-b049-b568038dd038 service nova] Releasing lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.700831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquired lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.701050] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.857515] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058209} completed successfully. 
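Several of the surrounding entries follow the same shape: submit a vCenter task (Rename_Task, ExtendVirtualDisk_Task, PowerOnVM_Task), then poll it until it completes, logging "progress is N%" along the way. A generic sketch of that polling loop, with get_task_info() as a stand-in callable rather than the oslo.vmware API:

```python
import time

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task-info callable until the task succeeds, errors, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()   # e.g. {"state": "running", "progress": 88}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Corresponds to the periodic "progress is N%" debug lines.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete within the timeout")
```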
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.857515] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.857515] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b91003e-9272-46da-852a-5471c3aae95b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.877429] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ef92e4ba-4ef3-4e26-9577-bad0c046ed47/ef92e4ba-4ef3-4e26-9577-bad0c046ed47.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.878431] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 79 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 818.878699] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 79 to 80 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 818.878850] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.881980] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b47aeed4-ea0f-40a7-a08f-06116cf3441b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.901892] env[62503]: DEBUG oslo_vmware.api [None 
req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 818.901892] env[62503]: value = "task-1387769" [ 818.901892] env[62503]: _type = "Task" [ 818.901892] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.909511] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387769, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.130675] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387768, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.218370] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.300949] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.397121] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.457s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.397807] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 819.400813] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.873s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.401543] env[62503]: DEBUG nova.objects.instance [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lazy-loading 'resources' on Instance uuid 141d7d04-0267-4e15-90ed-112ac8fb8c9b {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.412741] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387769, 'name': ReconfigVM_Task, 'duration_secs': 0.254841} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.413486] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ef92e4ba-4ef3-4e26-9577-bad0c046ed47/ef92e4ba-4ef3-4e26-9577-bad0c046ed47.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.414061] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccc9f0e3-ee6a-4d95-ac68-a51de69abf90 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.420373] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 819.420373] env[62503]: value = "task-1387770" [ 819.420373] env[62503]: _type = "Task" [ 819.420373] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.428315] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387770, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.569762] env[62503]: DEBUG nova.compute.manager [req-7d16441e-8e1c-4f0a-9630-cec57277f144 req-4867b10e-e925-42c7-890e-cb34c60dd369 service nova] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Received event network-vif-deleted-bf3af8e0-63c1-45af-8f69-da97b8b24149 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 819.631281] env[62503]: DEBUG oslo_vmware.api [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387768, 'name': PowerOnVM_Task, 'duration_secs': 0.947176} completed successfully. 
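The Lock "compute_resources" acquired/released lines, including the 17.873s wait above, come from oslo.concurrency's in-process locking around the resource tracker. A minimal sketch of the decorator form of that API; the function body is illustrative only:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage(resource_tracker, instance):
    # Only one greenthread per compute process mutates tracked resources at a
    # time; a long "waited" value in the log means another claim, abort, or
    # usage update held this lock first.  The body here is illustrative.
    resource_tracker.tracked_instances.add(instance["uuid"])
```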
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.631473] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.631675] env[62503]: INFO nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Took 6.32 seconds to spawn the instance on the hypervisor. [ 819.631932] env[62503]: DEBUG nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 819.632902] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30321b9f-c48c-41f2-b2fb-817ed49f4b6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.804034] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Releasing lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.804285] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 819.804525] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.804858] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0b4b902-e417-47f9-9894-2753d4dd845a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.813373] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ef8e1b-1bf5-4864-b106-e1d4b53b2f02 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.835859] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cb117e3-ff57-4e7f-bb2b-a12c988e362c could not be found. 
[ 819.836082] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.836223] env[62503]: INFO nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 819.836480] env[62503]: DEBUG oslo.service.loopingcall [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.836705] env[62503]: DEBUG nova.compute.manager [-] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 819.836793] env[62503]: DEBUG nova.network.neutron [-] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.851706] env[62503]: DEBUG nova.network.neutron [-] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.906202] env[62503]: DEBUG nova.compute.utils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.911562] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 819.911742] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.930675] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387770, 'name': Rename_Task, 'duration_secs': 0.124143} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.933243] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.933662] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1113236-7dc8-441c-95ac-a14e46b0a130 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.940187] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 819.940187] env[62503]: value = "task-1387771" [ 819.940187] env[62503]: _type = "Task" [ 819.940187] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.948130] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.963342] env[62503]: DEBUG nova.policy [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '410af55086a5487ea0e284705060593d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '622a848b6d634ad09e3a25a19ebb4916', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 820.152016] env[62503]: INFO nova.compute.manager [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Took 36.95 seconds to build instance. 
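The policy line above shows why no external network is attached: the request credentials carry only the member and reader roles with is_admin False, and the default rule for network:attach_external_network effectively requires an admin context. A plain-Python illustration of that outcome (not oslo.policy, and the rule wording is an assumption):

```python
def check_attach_external_network(creds):
    # Illustrative rule, roughly "is_admin:True": attaching an external
    # network is treated as an admin-only operation by default.
    return bool(creds.get("is_admin"))

creds = {"is_admin": False,
         "roles": ["member", "reader"],
         "project_id": "622a848b6d634ad09e3a25a19ebb4916"}
print(check_attach_external_network(creds))   # False -> "Policy check ... failed"
```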
[ 820.235566] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6272b8d-f9ac-4527-8ac2-fb65381ddd8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.244101] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba535ad-f4b7-433d-95e0-fa200a7f2f83 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.277942] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3e1c5d-faf7-4e81-9271-3e89e39a89b2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.285588] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863fb468-a5e7-4b55-9501-a3d128c05ccc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.299599] env[62503]: DEBUG nova.compute.provider_tree [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.302333] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Successfully created port: 016961c8-b433-418d-a1a0-981912785d4c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.354572] env[62503]: DEBUG nova.network.neutron [-] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.409558] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 820.456608] env[62503]: DEBUG oslo_vmware.api [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387771, 'name': PowerOnVM_Task, 'duration_secs': 0.457984} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.456608] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 820.456608] env[62503]: INFO nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Took 4.81 seconds to spawn the instance on the hypervisor. 
[ 820.456772] env[62503]: DEBUG nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 820.458188] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd486dc8-a34a-44da-9df9-c470e74cdfc9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.653901] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e26226b-2af4-47a7-8eef-0a1c99867cff tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.185s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.809185] env[62503]: DEBUG nova.scheduler.client.report [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 820.857402] env[62503]: INFO nova.compute.manager [-] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Took 1.02 seconds to deallocate network for instance. [ 820.859889] env[62503]: DEBUG nova.compute.claims [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 820.860081] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.978306] env[62503]: INFO nova.compute.manager [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Took 35.94 seconds to build instance. 
[ 821.127388] env[62503]: INFO nova.compute.manager [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Rebuilding instance [ 821.158790] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 821.177779] env[62503]: DEBUG nova.compute.manager [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 821.177779] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1282fe-4ef2-400e-9718-0efaf22cde3d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.251849] env[62503]: ERROR nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. [ 821.251849] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 821.251849] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.251849] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.251849] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.251849] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.251849] env[62503]: ERROR nova.compute.manager raise self.value [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.251849] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 821.251849] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.251849] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 821.252261] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.252261] env[62503]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 821.252261] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. [ 821.252261] env[62503]: ERROR nova.compute.manager [ 821.252261] env[62503]: Traceback (most recent call last): [ 821.252261] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 821.252261] env[62503]: listener.cb(fileno) [ 821.252261] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.252261] env[62503]: result = function(*args, **kwargs) [ 821.252261] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.252261] env[62503]: return func(*args, **kwargs) [ 821.252261] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 821.252261] env[62503]: raise e [ 821.252261] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 821.252261] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 821.252261] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.252261] env[62503]: created_port_ids = self._update_ports_for_instance( [ 821.252261] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.252261] env[62503]: with excutils.save_and_reraise_exception(): [ 821.252261] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.252261] env[62503]: self.force_reraise() [ 821.252261] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.252261] env[62503]: raise self.value [ 821.252261] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.252261] env[62503]: updated_port = self._update_port( [ 821.252261] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.252261] env[62503]: _ensure_no_port_binding_failure(port) [ 821.252261] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.252261] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 821.253025] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. 
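Once a build fails like this, the claim is aborted and the instance's Placement allocations are released, as the "Deleted allocations for instance ..." line just below records. A minimal sketch of the corresponding Placement call; the URL and token handling are placeholders:

```python
import requests

def delete_allocations(placement_url, token, consumer_uuid):
    """Drop every allocation held by the instance (the Placement consumer)."""
    resp = requests.delete(
        f"{placement_url}/allocations/{consumer_uuid}",
        headers={"X-Auth-Token": token,
                 "OpenStack-API-Version": "placement 1.28"})
    # 204 on success; 404 just means the consumer no longer holds allocations.
    if resp.status_code not in (204, 404):
        resp.raise_for_status()
```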
[ 821.253025] env[62503]: Removing descriptor: 14 [ 821.313403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.315793] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.689s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.338191] env[62503]: INFO nova.scheduler.client.report [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Deleted allocations for instance 141d7d04-0267-4e15-90ed-112ac8fb8c9b [ 821.418994] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 821.440206] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 821.440512] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 821.440695] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.440878] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 821.441059] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.441219] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 821.441423] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 821.441581] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 821.441749] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 821.441907] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 821.442088] env[62503]: DEBUG nova.virt.hardware [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.442943] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c0b1b9-4c13-407d-b837-9bca4abf41df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.450753] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503b9e8c-4118-4f09-9732-b4572b7547af {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.464450] env[62503]: ERROR nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. 
[ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Traceback (most recent call last): [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] yield resources [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.driver.spawn(context, instance, image_meta, [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] vm_ref = self.build_virtual_machine(instance, [ 821.464450] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] for vif in network_info: [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return self._sync_wrapper(fn, *args, **kwargs) [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.wait() [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self[:] = self._gt.wait() [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return self._exit_event.wait() [ 821.464910] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 821.464910] env[62503]: ERROR 
nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] current.throw(*self._exc) [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] result = function(*args, **kwargs) [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return func(*args, **kwargs) [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise e [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] nwinfo = self.network_api.allocate_for_instance( [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] created_port_ids = self._update_ports_for_instance( [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] with excutils.save_and_reraise_exception(): [ 821.465291] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.force_reraise() [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise self.value [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] updated_port = self._update_port( [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] _ensure_no_port_binding_failure(port) [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise exception.PortBindingFailed(port_id=port['id']) [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. [ 821.465613] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] [ 821.465613] env[62503]: INFO nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Terminating instance [ 821.466684] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.466845] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquired lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.467029] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.480160] env[62503]: DEBUG oslo_concurrency.lockutils [None req-337e5891-71cd-4f0a-a7c5-d668c349bcf9 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.602s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.616253] env[62503]: DEBUG nova.compute.manager [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Received event network-changed-016961c8-b433-418d-a1a0-981912785d4c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 821.616253] env[62503]: DEBUG nova.compute.manager [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Refreshing instance network info cache due to event network-changed-016961c8-b433-418d-a1a0-981912785d4c. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 821.616253] env[62503]: DEBUG oslo_concurrency.lockutils [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] Acquiring lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.676743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.851704] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5689ca1-10ce-4a97-b742-119ca139fe26 tempest-ServerShowV247Test-342787266 tempest-ServerShowV247Test-342787266-project-member] Lock "141d7d04-0267-4e15-90ed-112ac8fb8c9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.073s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.984985] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 821.988414] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.057485] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac91f736-3845-4ee7-b5b3-621eef24b680 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.065135] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9d88b4-63c7-49c9-a893-6de1babf18bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.095392] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.097295] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92631641-3bb7-408e-95b3-4dfac0de32d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.104542] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002581d2-eebd-4bf5-a01d-52aa472f2663 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.118438] env[62503]: DEBUG nova.compute.provider_tree [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.191799] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.192476] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6394e1bc-3d15-4c05-b999-37b86fb13b67 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.199298] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 822.199298] env[62503]: value = "task-1387772" [ 822.199298] env[62503]: _type = "Task" [ 822.199298] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.207819] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387772, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.352141] env[62503]: DEBUG nova.compute.manager [None req-82e60d51-1dc7-4011-9432-14628b703f0d tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 822.353068] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3aba5b4-6993-42cc-a75e-2954159118e9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.421376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.422044] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.422044] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.422234] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.422417] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.424678] env[62503]: INFO nova.compute.manager [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Terminating instance [ 822.426423] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "refresh_cache-ef92e4ba-4ef3-4e26-9577-bad0c046ed47" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.426588] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquired lock "refresh_cache-ef92e4ba-4ef3-4e26-9577-bad0c046ed47" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.427425] env[62503]: DEBUG nova.network.neutron [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.506140] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.600458] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Releasing lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.600965] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 822.601218] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.601584] env[62503]: DEBUG oslo_concurrency.lockutils [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] Acquired lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.601806] env[62503]: DEBUG nova.network.neutron [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Refreshing network info cache for port 016961c8-b433-418d-a1a0-981912785d4c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.602894] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46fb74d2-444c-420c-94d4-cf8866fdfe5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.612718] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28de009-6b23-47a0-ab4c-3c66958ab130 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.624987] env[62503]: DEBUG nova.scheduler.client.report [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 822.640848] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 09688e22-9225-4619-a9aa-eddb332cb8ab could not be found. [ 822.641089] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.641279] env[62503]: INFO nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Took 0.04 seconds to destroy the instance on the hypervisor. 
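[editor's note, not part of the captured log] Both PortBindingFailed tracebacks in this trace terminate in `_ensure_no_port_binding_failure` (nova/network/neutron.py:294), which inspects the port returned by Neutron and aborts the build when its binding failed. The following is a minimal, self-contained sketch of that check; the exception class and the `VIF_TYPE_BINDING_FAILED` constant are local stand-ins for nova's own `nova.exception.PortBindingFailed` and VIF-type constant, and the sample port dict simply reuses the port ID seen above.

```python
# Illustrative sketch only: approximates the check that produces the
# "Binding failed for port ..." errors in the log above.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # local stand-in constant


class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding via the port's binding:vif_type field;
    # when it is 'binding_failed', spawning cannot proceed, the claim is
    # aborted, and the instance is re-scheduled (as happens for 9eee91c6
    # later in this trace).
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed, as a Neutron API response might report it.
try:
    ensure_no_port_binding_failure(
        {'id': '016961c8-b433-418d-a1a0-981912785d4c',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```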
[ 822.641528] env[62503]: DEBUG oslo.service.loopingcall [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.642254] env[62503]: DEBUG nova.compute.manager [-] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 822.642428] env[62503]: DEBUG nova.network.neutron [-] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.657102] env[62503]: DEBUG nova.network.neutron [-] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.709009] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387772, 'name': PowerOffVM_Task, 'duration_secs': 0.11059} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.709236] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 822.709429] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.710228] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b02cb28-d7dd-4e22-a9ba-0443b1ecabc5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.717022] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 822.717282] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69e4e42f-3db7-47b8-b033-302662498f3a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.744552] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 822.744944] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 
tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 822.745283] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Deleting the datastore file [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 822.745576] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f35b5c50-871e-4b42-af2c-332ba55ec78d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.751736] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 822.751736] env[62503]: value = "task-1387774" [ 822.751736] env[62503]: _type = "Task" [ 822.751736] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.759316] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387774, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.864114] env[62503]: INFO nova.compute.manager [None req-82e60d51-1dc7-4011-9432-14628b703f0d tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] instance snapshotting [ 822.864792] env[62503]: DEBUG nova.objects.instance [None req-82e60d51-1dc7-4011-9432-14628b703f0d tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lazy-loading 'flavor' on Instance uuid ef92e4ba-4ef3-4e26-9577-bad0c046ed47 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.946221] env[62503]: DEBUG nova.network.neutron [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.010809] env[62503]: DEBUG nova.network.neutron [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.122136] env[62503]: DEBUG nova.network.neutron [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.129956] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.814s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.130622] env[62503]: ERROR nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Traceback (most recent call last): [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.driver.spawn(context, instance, image_meta, [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] vm_ref = self.build_virtual_machine(instance, [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] vif_infos = vmwarevif.get_vif_info(self._session, [ 823.130622] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] for vif in network_info: [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self._sync_wrapper(fn, *args, **kwargs) [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.wait() [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 823.130902] env[62503]: ERROR 
nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self[:] = self._gt.wait() [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self._exit_event.wait() [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] result = hub.switch() [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 823.130902] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return self.greenlet.switch() [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] result = function(*args, **kwargs) [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] return func(*args, **kwargs) [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise e [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] nwinfo = self.network_api.allocate_for_instance( [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] created_port_ids = self._update_ports_for_instance( [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] with excutils.save_and_reraise_exception(): [ 823.131209] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] self.force_reraise() [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise self.value [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] updated_port = self._update_port( [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] _ensure_no_port_binding_failure(port) [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] raise exception.PortBindingFailed(port_id=port['id']) [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] nova.exception.PortBindingFailed: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. [ 823.131498] env[62503]: ERROR nova.compute.manager [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] [ 823.131846] env[62503]: DEBUG nova.compute.utils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 823.132846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.422s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.134560] env[62503]: INFO nova.compute.claims [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.137186] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Build of instance 9eee91c6-a949-453b-8ccd-ba986251ed27 was re-scheduled: Binding failed for port 64751884-9fbd-4102-a5b7-cdbad8d77b85, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 823.137791] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 823.138041] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquiring lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.139350] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Acquired lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.139350] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.159318] env[62503]: DEBUG nova.network.neutron [-] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.220916] env[62503]: DEBUG nova.network.neutron [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.261799] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100924} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.262071] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 823.262265] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 823.262438] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 823.371011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef747cf-fcc0-4cf6-82b5-04be4395438e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.389392] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e03551e-51f5-4016-8dbe-de7f622f6897 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.513864] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Releasing lock "refresh_cache-ef92e4ba-4ef3-4e26-9577-bad0c046ed47" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.514308] env[62503]: DEBUG nova.compute.manager [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 823.514497] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.515414] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220a79e4-84b8-4299-879c-9465f0a085bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.523089] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.523307] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b084ae8c-0d1e-4c9d-85a1-4fcbdd23eb14 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.529647] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 823.529647] env[62503]: value = "task-1387775" [ 823.529647] env[62503]: _type = "Task" [ 823.529647] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.536950] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.661465] env[62503]: INFO nova.compute.manager [-] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Took 1.02 seconds to deallocate network for instance. [ 823.667098] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.669209] env[62503]: DEBUG nova.compute.claims [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 823.669366] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.724060] env[62503]: DEBUG oslo_concurrency.lockutils [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] Releasing lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.724379] env[62503]: DEBUG nova.compute.manager [req-dd22a755-0798-40c3-b7a7-0c2a5e1435fa req-0d9ca6b7-4651-4c00-9c89-b4387eeeea78 service nova] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Received event network-vif-deleted-016961c8-b433-418d-a1a0-981912785d4c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 823.804279] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.900069] env[62503]: DEBUG nova.compute.manager [None req-82e60d51-1dc7-4011-9432-14628b703f0d tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance disappeared during snapshot {{(pid=62503) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4547}} [ 824.028798] env[62503]: DEBUG nova.compute.manager [None req-82e60d51-1dc7-4011-9432-14628b703f0d tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Found 0 images (rotation: 2) {{(pid=62503) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4850}} [ 824.038910] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387775, 'name': PowerOffVM_Task, 'duration_secs': 0.120742} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.039181] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.039344] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.039623] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-014cc161-50a6-4262-bcae-f3a1ee9d9512 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.064813] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.065079] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.065306] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Deleting the datastore file [datastore1] ef92e4ba-4ef3-4e26-9577-bad0c046ed47 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.065577] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1aa752d-f3f5-480b-837b-7b908c451e89 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.071704] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for the task: (returnval){ [ 824.071704] env[62503]: value = "task-1387777" [ 824.071704] env[62503]: _type = "Task" [ 824.071704] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.079105] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387777, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.292254] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 824.292509] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 824.292667] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.292848] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 824.294503] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.294503] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 824.294503] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 824.294503] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 824.294503] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 
tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 824.294722] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 824.294722] env[62503]: DEBUG nova.virt.hardware [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 824.294811] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc07ecb6-3b43-43ea-9513-a7ce15ae39a1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.304820] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1af17a-c19a-4fb5-abe3-3982c70779e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.310405] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Releasing lock "refresh_cache-9eee91c6-a949-453b-8ccd-ba986251ed27" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.310614] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 824.310791] env[62503]: DEBUG nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 824.310953] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.322895] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.328425] env[62503]: DEBUG oslo.service.loopingcall [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.329217] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.334186] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.334186] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-722ccda8-e7fc-4c24-ac38-2f696657bcba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.352957] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.352957] env[62503]: value = "task-1387778" [ 824.352957] env[62503]: _type = "Task" [ 824.352957] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.361257] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387778, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.367222] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc67d10b-0448-4d69-8b21-a7cc20ff413d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.375283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2897e05-500e-496b-b510-85ba6d134752 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.405084] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61874bc6-20fd-43e7-8d5b-2f7d1f24374d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.412102] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28129809-c17c-4058-ab83-d3fd9a3cfe1e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.425541] env[62503]: DEBUG nova.compute.provider_tree [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.581907] env[62503]: DEBUG oslo_vmware.api [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Task: {'id': task-1387777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093357} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.582176] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.582366] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.582542] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.582715] env[62503]: INFO nova.compute.manager [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Took 1.07 seconds to destroy the instance on the hypervisor. [ 824.582954] env[62503]: DEBUG oslo.service.loopingcall [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.583155] env[62503]: DEBUG nova.compute.manager [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 824.583248] env[62503]: DEBUG nova.network.neutron [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.597055] env[62503]: DEBUG nova.network.neutron [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.833916] env[62503]: DEBUG nova.network.neutron [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.862438] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387778, 'name': CreateVM_Task, 'duration_secs': 0.250306} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.862606] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.862995] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.863175] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.863479] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 824.863723] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b264c56c-5568-492a-a620-c549d12123fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.867821] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 824.867821] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d0fc66-6991-289a-20b1-04dd6e11b89d" [ 824.867821] env[62503]: _type = "Task" [ 824.867821] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.875030] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d0fc66-6991-289a-20b1-04dd6e11b89d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.929085] env[62503]: DEBUG nova.scheduler.client.report [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 825.099461] env[62503]: DEBUG nova.network.neutron [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.336423] env[62503]: INFO nova.compute.manager [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] [instance: 9eee91c6-a949-453b-8ccd-ba986251ed27] Took 1.03 seconds to deallocate network for instance. [ 825.379302] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d0fc66-6991-289a-20b1-04dd6e11b89d, 'name': SearchDatastore_Task, 'duration_secs': 0.013318} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.379654] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.379884] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.380125] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.380297] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.380559] env[62503]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.380835] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5356acd0-9c21-439c-b10e-844040e06e71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.388628] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.388824] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.389520] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9707d0ce-0d3c-4f89-9ee7-cad254a4fc9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.394115] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 825.394115] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52542880-17d5-a0b1-0969-bb5ba7a68b25" [ 825.394115] env[62503]: _type = "Task" [ 825.394115] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.401288] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52542880-17d5-a0b1-0969-bb5ba7a68b25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.434450] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.434931] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 825.438056] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.458s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.603736] env[62503]: INFO nova.compute.manager [-] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Took 1.02 seconds to deallocate network for instance. [ 825.905113] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52542880-17d5-a0b1-0969-bb5ba7a68b25, 'name': SearchDatastore_Task, 'duration_secs': 0.008097} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.906220] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c96293bc-a3fe-4cdb-8ceb-533ab2befe85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.911321] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 825.911321] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529b27ac-62c7-9228-7f40-de70c1d40af8" [ 825.911321] env[62503]: _type = "Task" [ 825.911321] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.918834] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529b27ac-62c7-9228-7f40-de70c1d40af8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.942038] env[62503]: DEBUG nova.compute.utils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.946439] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 825.946619] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.006483] env[62503]: DEBUG nova.policy [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55e2c8c758094436bf5930f8cb9077c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'caa3c99962584618b6bdf44064a57f25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 826.109984] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.137461] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6d3a60-9daf-4e2f-aa92-17d50ff3dcf2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.145113] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1b79e2-4d09-4d49-a8d9-d73609e09073 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.176257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d8ecfe-5402-4519-b97f-32873c9ad34e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.183766] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803c89ce-eefa-4d99-be8d-18acfeb2d10f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.197819] env[62503]: DEBUG nova.compute.provider_tree [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.263330] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Successfully created port: 13ea3744-a93f-474f-bacd-9c28ddac981a {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.366815] env[62503]: INFO nova.scheduler.client.report [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a 
tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Deleted allocations for instance 9eee91c6-a949-453b-8ccd-ba986251ed27 [ 826.422122] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529b27ac-62c7-9228-7f40-de70c1d40af8, 'name': SearchDatastore_Task, 'duration_secs': 0.008096} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.422400] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.422689] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.422922] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-779ac90c-81c4-4272-8ddd-010a39b6aa1c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.429124] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 826.429124] env[62503]: value = "task-1387779" [ 826.429124] env[62503]: _type = "Task" [ 826.429124] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.436950] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.449798] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 826.701650] env[62503]: DEBUG nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 826.885923] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2d6004ed-f8ef-4f69-80bc-a0a595a2cd5a tempest-AttachVolumeTestJSON-1238928908 tempest-AttachVolumeTestJSON-1238928908-project-member] Lock "9eee91c6-a949-453b-8ccd-ba986251ed27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.877s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.939589] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387779, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.184164] env[62503]: DEBUG nova.compute.manager [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Received event network-changed-13ea3744-a93f-474f-bacd-9c28ddac981a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 827.184466] env[62503]: DEBUG nova.compute.manager [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Refreshing instance network info cache due to event network-changed-13ea3744-a93f-474f-bacd-9c28ddac981a. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 827.184579] env[62503]: DEBUG oslo_concurrency.lockutils [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] Acquiring lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.184722] env[62503]: DEBUG oslo_concurrency.lockutils [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] Acquired lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.184876] env[62503]: DEBUG nova.network.neutron [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Refreshing network info cache for port 13ea3744-a93f-474f-bacd-9c28ddac981a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.207743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.770s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.208357] env[62503]: ERROR nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. 
[ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Traceback (most recent call last): [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.driver.spawn(context, instance, image_meta, [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] vm_ref = self.build_virtual_machine(instance, [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.208357] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] for vif in network_info: [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return self._sync_wrapper(fn, *args, **kwargs) [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.wait() [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self[:] = self._gt.wait() [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return self._exit_event.wait() [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] current.throw(*self._exc) [ 827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
827.208663] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] result = function(*args, **kwargs) [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] return func(*args, **kwargs) [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise e [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] nwinfo = self.network_api.allocate_for_instance( [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] created_port_ids = self._update_ports_for_instance( [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] with excutils.save_and_reraise_exception(): [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] self.force_reraise() [ 827.209118] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise self.value [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] updated_port = self._update_port( [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] _ensure_no_port_binding_failure(port) [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] raise exception.PortBindingFailed(port_id=port['id']) [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] nova.exception.PortBindingFailed: Binding failed for 
port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. [ 827.209398] env[62503]: ERROR nova.compute.manager [instance: f6f17748-815c-417f-bce6-3bc97f23b637] [ 827.209398] env[62503]: DEBUG nova.compute.utils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 827.210160] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.536s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.214088] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Build of instance f6f17748-815c-417f-bce6-3bc97f23b637 was re-scheduled: Binding failed for port ad998f5e-6b3a-49ee-ab61-31cfb715bf6a, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 827.214511] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 827.214734] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquiring lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.214880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Acquired lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.215047] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.363867] env[62503]: ERROR nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. 
[ 827.363867] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 827.363867] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.363867] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.363867] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.363867] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.363867] env[62503]: ERROR nova.compute.manager raise self.value [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.363867] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 827.363867] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.363867] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 827.364346] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.364346] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 827.364346] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. 
[ 827.364346] env[62503]: ERROR nova.compute.manager [ 827.364346] env[62503]: Traceback (most recent call last): [ 827.364346] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 827.364346] env[62503]: listener.cb(fileno) [ 827.364346] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.364346] env[62503]: result = function(*args, **kwargs) [ 827.364346] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.364346] env[62503]: return func(*args, **kwargs) [ 827.364346] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 827.364346] env[62503]: raise e [ 827.364346] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 827.364346] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 827.364346] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.364346] env[62503]: created_port_ids = self._update_ports_for_instance( [ 827.364346] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.364346] env[62503]: with excutils.save_and_reraise_exception(): [ 827.364346] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.364346] env[62503]: self.force_reraise() [ 827.364346] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.364346] env[62503]: raise self.value [ 827.364346] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.364346] env[62503]: updated_port = self._update_port( [ 827.364346] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.364346] env[62503]: _ensure_no_port_binding_failure(port) [ 827.364346] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.364346] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 827.365124] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. [ 827.365124] env[62503]: Removing descriptor: 14 [ 827.388218] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 827.441346] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542677} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.441617] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.441835] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.442096] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-779d4198-27f1-4674-9b2e-ed372fd6d1c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.450112] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 827.450112] env[62503]: value = "task-1387780" [ 827.450112] env[62503]: _type = "Task" [ 827.450112] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.458244] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 827.460192] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387780, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.484025] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.484025] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.484025] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.484239] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.484239] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.484239] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.484475] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.484641] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.484810] env[62503]: DEBUG nova.virt.hardware [None 
req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.484973] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.485171] env[62503]: DEBUG nova.virt.hardware [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.486026] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8919f1-4370-449b-9d7f-0f72064b2afb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.493279] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fada4bc-5ba7-4b9d-a3c0-72fc432e74ad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.507522] env[62503]: ERROR nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. 
[ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Traceback (most recent call last): [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] yield resources [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.driver.spawn(context, instance, image_meta, [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] vm_ref = self.build_virtual_machine(instance, [ 827.507522] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] for vif in network_info: [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return self._sync_wrapper(fn, *args, **kwargs) [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.wait() [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self[:] = self._gt.wait() [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return self._exit_event.wait() [ 827.507861] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 827.507861] env[62503]: ERROR 
nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] current.throw(*self._exc) [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] result = function(*args, **kwargs) [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return func(*args, **kwargs) [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise e [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] nwinfo = self.network_api.allocate_for_instance( [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] created_port_ids = self._update_ports_for_instance( [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] with excutils.save_and_reraise_exception(): [ 827.508158] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.force_reraise() [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise self.value [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] updated_port = self._update_port( [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] _ensure_no_port_binding_failure(port) [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise exception.PortBindingFailed(port_id=port['id']) [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. [ 827.508520] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] [ 827.508520] env[62503]: INFO nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Terminating instance [ 827.509846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquiring lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.708745] env[62503]: DEBUG nova.network.neutron [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.733449] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.817104] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.827094] env[62503]: DEBUG nova.network.neutron [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.909276] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.932593] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7f2ae1-2362-42ff-b135-6846551910c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.940276] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d417ff-6bf1-45c0-a209-05604d702761 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.974021] 
env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6616cc-20ee-457e-9324-81656f5633d9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.981813] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071602} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.983532] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.984057] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996de4c3-d698-4629-af72-6d6614fe1574 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.987457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51def6d8-c6e9-4bd6-a13e-88984e261cc6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.008957] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.016866] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a5165c1-d7cb-45a3-a31d-c163f45cdca4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.031014] env[62503]: DEBUG nova.compute.provider_tree [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.038293] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 828.038293] env[62503]: value = "task-1387781" [ 828.038293] env[62503]: _type = "Task" [ 828.038293] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.046918] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387781, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.320352] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Releasing lock "refresh_cache-f6f17748-815c-417f-bce6-3bc97f23b637" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.320613] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 828.320827] env[62503]: DEBUG nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 828.320992] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.332298] env[62503]: DEBUG oslo_concurrency.lockutils [req-149fcede-de84-4c01-b366-c9138092cbd9 req-2dcd2243-c8a3-4cc1-a78e-309b4b2df031 service nova] Releasing lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.333470] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquired lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.333804] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.338030] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.534552] env[62503]: DEBUG nova.scheduler.client.report [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 828.550223] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387781, 'name': ReconfigVM_Task, 'duration_secs': 0.246204} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.550855] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e/86422990-4215-4628-a7a7-4fdc910e304e.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.550971] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f4f767f-2178-4829-8981-b8c16036de9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.557078] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 828.557078] env[62503]: value = "task-1387782" [ 828.557078] env[62503]: _type = "Task" [ 828.557078] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.564925] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387782, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.843768] env[62503]: DEBUG nova.network.neutron [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.862995] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.996963] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.039605] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.829s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.040274] env[62503]: ERROR nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Traceback (most recent call last): [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.driver.spawn(context, instance, image_meta, [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] vm_ref = self.build_virtual_machine(instance, [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 829.040274] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] for vif in network_info: [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return self._sync_wrapper(fn, *args, **kwargs) [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 
1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.wait() [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self[:] = self._gt.wait() [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return self._exit_event.wait() [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] current.throw(*self._exc) [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 829.040708] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] result = function(*args, **kwargs) [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] return func(*args, **kwargs) [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise e [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] nwinfo = self.network_api.allocate_for_instance( [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] created_port_ids = self._update_ports_for_instance( [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] with excutils.save_and_reraise_exception(): [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] self.force_reraise() [ 829.041072] env[62503]: ERROR nova.compute.manager [instance: 
1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise self.value [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] updated_port = self._update_port( [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] _ensure_no_port_binding_failure(port) [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] raise exception.PortBindingFailed(port_id=port['id']) [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] nova.exception.PortBindingFailed: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. [ 829.041421] env[62503]: ERROR nova.compute.manager [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] [ 829.041421] env[62503]: DEBUG nova.compute.utils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 829.042992] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Build of instance 1251e59f-9c01-4115-8400-40aacedd97e2 was re-scheduled: Binding failed for port 0b345b9f-73a4-4907-b9be-78896c2c7c3b, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 829.043131] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 829.043393] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquiring lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.043460] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Acquired lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.044382] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.044580] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.108s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.050308] env[62503]: INFO nova.compute.claims [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.067670] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387782, 'name': Rename_Task, 'duration_secs': 0.13652} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.068473] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.068473] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9fe5ff7-1e75-4152-98ac-890f961e080a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.075348] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 829.075348] env[62503]: value = "task-1387783" [ 829.075348] env[62503]: _type = "Task" [ 829.075348] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.084596] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.210007] env[62503]: DEBUG nova.compute.manager [req-847edf69-7104-48d6-848b-9bff3bcd1818 req-37504590-da92-4651-a4b5-03c19c90947d service nova] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Received event network-vif-deleted-13ea3744-a93f-474f-bacd-9c28ddac981a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 829.348524] env[62503]: INFO nova.compute.manager [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] [instance: f6f17748-815c-417f-bce6-3bc97f23b637] Took 1.03 seconds to deallocate network for instance. [ 829.500111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Releasing lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.500595] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 829.500804] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 829.501143] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5bd3d51-efca-41ff-92a5-3efbb32a3799 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.509594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49230b41-bdef-46d7-90be-1ddf62b593b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.531740] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b1fc7438-2078-435a-9754-19a8a1bc6f5c could not be found. [ 829.532016] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.532238] env[62503]: INFO nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 829.532482] env[62503]: DEBUG oslo.service.loopingcall [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.532713] env[62503]: DEBUG nova.compute.manager [-] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 829.532804] env[62503]: DEBUG nova.network.neutron [-] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.547569] env[62503]: DEBUG nova.network.neutron [-] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.573152] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.584611] env[62503]: DEBUG oslo_vmware.api [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387783, 'name': PowerOnVM_Task, 'duration_secs': 0.43311} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.585216] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.585216] env[62503]: DEBUG nova.compute.manager [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 829.585831] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d051cc6-d353-4e84-b40d-37d37eb3bec7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.662109] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.049887] env[62503]: DEBUG nova.network.neutron [-] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.103551] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.165646] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Releasing lock "refresh_cache-1251e59f-9c01-4115-8400-40aacedd97e2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.165884] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 830.166086] env[62503]: DEBUG nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 830.166319] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.180635] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.257156] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57615f9-2971-43c4-a5db-57b72de2878a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.264350] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac49518f-f4ec-41b1-a423-f9400ea25205 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.303324] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14022b8-24c8-4e01-925b-cc59efb68531 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.317148] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b799f5d9-34e2-4906-87d3-dca56e7386a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.337225] env[62503]: DEBUG nova.compute.provider_tree [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.376727] env[62503]: INFO nova.scheduler.client.report [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Deleted allocations for instance f6f17748-815c-417f-bce6-3bc97f23b637 [ 830.552941] env[62503]: INFO nova.compute.manager [-] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Took 1.02 seconds to deallocate network for instance. 
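Editor's note: both tracebacks above end in the same check, _ensure_no_port_binding_failure in nova/network/neutron.py raising PortBindingFailed, after which the compute manager aborts the resource claim, tears the instance down and (for 1251e59f) reschedules the build. The following is a minimal, self-contained sketch of that check; only the exception name, its message and the port-id argument come from the traceback, while the simplified exception class and the 'binding:vif_type' test are assumptions made here for illustration, not a copy of Nova's module.

    # Sketch of the check the tracebacks above end in (assumptions noted above).
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind; spawning against such a port
        # cannot succeed, so fail fast and let the caller abort the claim and
        # reschedule the build, as seen in the log above.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        port = {'id': '13ea3744-a93f-474f-bacd-9c28ddac981a',
                'binding:vif_type': 'binding_failed'}
        ensure_no_port_binding_failure(port)  # raises PortBindingFailed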
[ 830.555514] env[62503]: DEBUG nova.compute.claims [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 830.555697] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.683581] env[62503]: DEBUG nova.network.neutron [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.811247] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "86422990-4215-4628-a7a7-4fdc910e304e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.811535] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.811889] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "86422990-4215-4628-a7a7-4fdc910e304e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.812079] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.812251] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.817094] env[62503]: INFO nova.compute.manager [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 
tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Terminating instance [ 830.818838] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.818996] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquired lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.819173] env[62503]: DEBUG nova.network.neutron [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.841314] env[62503]: DEBUG nova.scheduler.client.report [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 830.885205] env[62503]: DEBUG oslo_concurrency.lockutils [None req-795b6309-d5a9-40f9-8a07-369f392b468e tempest-ServersTestManualDisk-925778780 tempest-ServersTestManualDisk-925778780-project-member] Lock "f6f17748-815c-417f-bce6-3bc97f23b637" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.838s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.186940] env[62503]: INFO nova.compute.manager [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] [instance: 1251e59f-9c01-4115-8400-40aacedd97e2] Took 1.02 seconds to deallocate network for instance. 
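Editor's note: the "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines throughout this stretch are emitted by oslo_concurrency.lockutils. Below is a rough sketch of the two usage patterns that produce them; the lock names mirror the log, but the wrapper functions are hypothetical stand-ins, not Nova's actual code.

    # Rough sketch, assuming oslo.concurrency is installed; functions are
    # hypothetical stand-ins, lock names are taken from the log.
    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)  # surface lockutils' DEBUG output


    @lockutils.synchronized('compute_resources')
    def claim_instance(instance_uuid):
        # Runs with the "compute_resources" lock held, serializing claims the
        # way ResourceTracker.instance_claim is serialized in the log.
        return f'claimed {instance_uuid}'


    def refresh_network_cache(instance_uuid):
        # Explicit context-manager form, matching the per-instance
        # "refresh_cache-<uuid>" locks around the cache rebuilds above.
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            return f'rebuilt network info cache for {instance_uuid}'


    if __name__ == '__main__':
        claim_instance('4deb28e7-351b-41b7-90bb-afdde200f7fa')
        refresh_network_cache('b1fc7438-2078-435a-9754-19a8a1bc6f5c')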
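Editor's note: for scale, the inventory dict logged above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 works out as follows under Placement's capacity formula (total - reserved) * allocation_ratio, with max_unit capping any single request.

    # Back-of-the-envelope on the logged inventory; numbers copied from the log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 175,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc:9s} capacity={capacity:.0f}  max single request={inv['max_unit']}")

    # VCPU      capacity=192     max single request=16
    # MEMORY_MB capacity=196078  max single request=65530
    # DISK_GB   capacity=400     max single request=175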
[ 831.345796] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.346376] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 831.350010] env[62503]: DEBUG nova.network.neutron [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.352069] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.667s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.353615] env[62503]: INFO nova.compute.claims [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.387480] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 831.435449] env[62503]: DEBUG nova.network.neutron [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.858722] env[62503]: DEBUG nova.compute.utils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.862138] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 831.862312] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.910180] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.933247] env[62503]: DEBUG nova.policy [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b072e4c8ef94b26895d59ede518aaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0849093c8b48400a8e9d56171ea99e8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.938380] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Releasing lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.938760] env[62503]: DEBUG nova.compute.manager [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 831.938952] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.939918] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0335674a-5df2-4afc-908f-7fcd87905990 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.947602] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.947862] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82467929-7c8e-43fb-bb95-0c55a1a19968 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.953519] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 831.953519] env[62503]: value = "task-1387784" [ 831.953519] env[62503]: _type = "Task" [ 831.953519] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.961697] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.221243] env[62503]: INFO nova.scheduler.client.report [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Deleted allocations for instance 1251e59f-9c01-4115-8400-40aacedd97e2 [ 832.339234] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Successfully created port: 3efea7b6-754e-4a5c-93d0-b78ff2e5133e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.363703] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 832.467444] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387784, 'name': PowerOffVM_Task, 'duration_secs': 0.104042} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.469928] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.470129] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 832.470962] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fefd701-af0c-4e41-bdb4-d0cb2395f518 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.498218] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 832.498436] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 832.498678] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Deleting the datastore file [datastore1] 86422990-4215-4628-a7a7-4fdc910e304e {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.498862] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf5258a7-0bf3-4845-b816-2b2a21eddb5a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.506048] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for the task: (returnval){ [ 832.506048] env[62503]: value = "task-1387786" [ 832.506048] env[62503]: _type = "Task" [ 832.506048] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.516510] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387786, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.585043] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eed7942-1a88-4855-8c22-d68b9198412e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.593109] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efc3b47-0b90-4766-9945-69d9d1341f33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.625277] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d9ebca-6f32-49d9-b389-05f2c0f97b28 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.632889] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad57ec2-8dfb-4cde-8afd-4f45230809cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.646079] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.730896] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1bdb5a3e-b1f3-497d-a480-68bcfc62ffc1 tempest-InstanceActionsNegativeTestJSON-133759453 tempest-InstanceActionsNegativeTestJSON-133759453-project-member] Lock "1251e59f-9c01-4115-8400-40aacedd97e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.011s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.018510] env[62503]: DEBUG oslo_vmware.api [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Task: {'id': task-1387786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106473} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.018866] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.019070] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.019253] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.019424] env[62503]: INFO nova.compute.manager [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 833.019690] env[62503]: DEBUG oslo.service.loopingcall [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.019889] env[62503]: DEBUG nova.compute.manager [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 833.019981] env[62503]: DEBUG nova.network.neutron [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 833.036264] env[62503]: DEBUG nova.network.neutron [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.169576] env[62503]: ERROR nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [req-49eacbf4-3eb1-42b1-9c24-05c728fc62e8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-49eacbf4-3eb1-42b1-9c24-05c728fc62e8"}]} [ 833.192634] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 833.219991] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 833.220226] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.235236] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 833.241256] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 833.246513] env[62503]: DEBUG nova.compute.manager [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Received event network-changed-3efea7b6-754e-4a5c-93d0-b78ff2e5133e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 833.249023] env[62503]: DEBUG nova.compute.manager [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Refreshing instance network info cache due to event network-changed-3efea7b6-754e-4a5c-93d0-b78ff2e5133e. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 833.249023] env[62503]: DEBUG oslo_concurrency.lockutils [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] Acquiring lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.249023] env[62503]: DEBUG oslo_concurrency.lockutils [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] Acquired lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.249023] env[62503]: DEBUG nova.network.neutron [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Refreshing network info cache for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.267538] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 833.386211] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 833.418582] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.418973] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.419231] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.419471] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.420012] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.421306] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.422637] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.422637] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.422906] 
env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.423221] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.424542] env[62503]: DEBUG nova.virt.hardware [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.428527] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b156375d-1cf7-4ff8-9b0f-cf9f00a8b71d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.441098] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cda19e-f2af-4547-a95e-4fe5b6c1e79a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.499713] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f952d592-9db5-4b18-8b89-ea04021bd008 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.507667] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ad31fa-4899-4f57-8dfd-c30882693803 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.540635] env[62503]: DEBUG nova.network.neutron [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.543308] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c162ec-6bcd-43bf-b6fc-2e983aa42d3f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.553518] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4f5d79-1d3e-42d3-ad7e-e61b9b63df7f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.571142] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.703183] env[62503]: ERROR nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. [ 833.703183] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 833.703183] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.703183] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.703183] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.703183] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.703183] env[62503]: ERROR nova.compute.manager raise self.value [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.703183] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 833.703183] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.703183] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 833.703673] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.703673] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 833.703673] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. 
[ 833.703673] env[62503]: ERROR nova.compute.manager [ 833.703673] env[62503]: Traceback (most recent call last): [ 833.703673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 833.703673] env[62503]: listener.cb(fileno) [ 833.703673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.703673] env[62503]: result = function(*args, **kwargs) [ 833.703673] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 833.703673] env[62503]: return func(*args, **kwargs) [ 833.703673] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 833.703673] env[62503]: raise e [ 833.703673] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 833.703673] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 833.703673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.703673] env[62503]: created_port_ids = self._update_ports_for_instance( [ 833.703673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.703673] env[62503]: with excutils.save_and_reraise_exception(): [ 833.703673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.703673] env[62503]: self.force_reraise() [ 833.703673] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.703673] env[62503]: raise self.value [ 833.703673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.703673] env[62503]: updated_port = self._update_port( [ 833.703673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.703673] env[62503]: _ensure_no_port_binding_failure(port) [ 833.703673] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.703673] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 833.704339] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. [ 833.704339] env[62503]: Removing descriptor: 14 [ 833.704339] env[62503]: ERROR nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. 
[ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Traceback (most recent call last): [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] yield resources [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.driver.spawn(context, instance, image_meta, [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.704339] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] vm_ref = self.build_virtual_machine(instance, [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] for vif in network_info: [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self._sync_wrapper(fn, *args, **kwargs) [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.wait() [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self[:] = self._gt.wait() [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self._exit_event.wait() [ 833.704656] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 833.704967] env[62503]: ERROR 
nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] result = hub.switch() [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self.greenlet.switch() [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] result = function(*args, **kwargs) [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return func(*args, **kwargs) [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise e [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] nwinfo = self.network_api.allocate_for_instance( [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.704967] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] created_port_ids = self._update_ports_for_instance( [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] with excutils.save_and_reraise_exception(): [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.force_reraise() [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise self.value [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] updated_port = self._update_port( [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.705272] 
env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] _ensure_no_port_binding_failure(port) [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.705272] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise exception.PortBindingFailed(port_id=port['id']) [ 833.705541] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. [ 833.705541] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] [ 833.705541] env[62503]: INFO nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Terminating instance [ 833.706886] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.769759] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.779807] env[62503]: DEBUG nova.network.neutron [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.936698] env[62503]: DEBUG nova.network.neutron [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.044225] env[62503]: INFO nova.compute.manager [-] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Took 1.02 seconds to deallocate network for instance. 
[ 834.109419] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 834.109777] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 81 to 82 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 834.109973] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.439177] env[62503]: DEBUG oslo_concurrency.lockutils [req-0cd2c606-7e41-43b0-b0ed-5f291816001c req-bc0fbf8b-77f5-4f3e-adc0-fc011903da4b service nova] Releasing lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.443445] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.443445] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.552179] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.615711] 
env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.264s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.617126] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 834.627031] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.764s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.966371] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.084070] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.132195] env[62503]: DEBUG nova.compute.utils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.137821] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 835.138747] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.191853] env[62503]: DEBUG nova.policy [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b91f37423644457180c2b99b2bce5794', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '340717b62db844469f32b7c985ce9380', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 835.390490] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cbccf5-9ceb-4bd8-be6b-86ff695cc38f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.398892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb5672d-ebaa-4b81-a1ac-34757c41b0dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.436871] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d0118e-2aff-4dc7-880b-2beceb3e91ab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.440391] env[62503]: DEBUG nova.compute.manager [req-45efaf1f-9b72-49eb-ab1c-1e89eb6d1115 req-7f0d8b55-508b-4dda-8703-bf55db532543 service nova] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Received event network-vif-deleted-3efea7b6-754e-4a5c-93d0-b78ff2e5133e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 835.446178] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245eb1e9-3522-49f1-9978-389cea0c82d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.465287] env[62503]: DEBUG nova.compute.provider_tree [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.588999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.588999] env[62503]: DEBUG nova.compute.manager [None 
req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 835.588999] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.588999] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5d8d8f5-b909-43c5-a093-c897c3fcb20c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.590863] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Successfully created port: 7de7e64b-635a-4946-9db0-68ebc5ffbf63 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.599429] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957a6cf1-2e91-4b6a-896d-eceeb7ce76aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.624018] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4deb28e7-351b-41b7-90bb-afdde200f7fa could not be found. [ 835.624312] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.624538] env[62503]: INFO nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 835.624846] env[62503]: DEBUG oslo.service.loopingcall [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.625139] env[62503]: DEBUG nova.compute.manager [-] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 835.625278] env[62503]: DEBUG nova.network.neutron [-] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.637822] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 835.643657] env[62503]: DEBUG nova.network.neutron [-] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.972451] env[62503]: DEBUG nova.scheduler.client.report [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 836.152382] env[62503]: DEBUG nova.network.neutron [-] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.480093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.856s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.480800] env[62503]: ERROR nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. 
[ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Traceback (most recent call last): [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.driver.spawn(context, instance, image_meta, [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] vm_ref = self.build_virtual_machine(instance, [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.480800] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] for vif in network_info: [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return self._sync_wrapper(fn, *args, **kwargs) [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.wait() [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self[:] = self._gt.wait() [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return self._exit_event.wait() [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] current.throw(*self._exc) [ 836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
836.481103] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] result = function(*args, **kwargs) [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] return func(*args, **kwargs) [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise e [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] nwinfo = self.network_api.allocate_for_instance( [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] created_port_ids = self._update_ports_for_instance( [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] with excutils.save_and_reraise_exception(): [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] self.force_reraise() [ 836.482405] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise self.value [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] updated_port = self._update_port( [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] _ensure_no_port_binding_failure(port) [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] raise exception.PortBindingFailed(port_id=port['id']) [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] nova.exception.PortBindingFailed: Binding failed for 
port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. [ 836.482795] env[62503]: ERROR nova.compute.manager [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] [ 836.482795] env[62503]: DEBUG nova.compute.utils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 836.486094] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Build of instance 4cb117e3-ff57-4e7f-bb2b-a12c988e362c was re-scheduled: Binding failed for port bf3af8e0-63c1-45af-8f69-da97b8b24149, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 836.486094] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 836.486094] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquiring lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.486094] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Acquired lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.486247] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.489511] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.811s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.489511] env[62503]: INFO nova.compute.claims [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.651925] env[62503]: DEBUG nova.compute.manager [None 
req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 836.658030] env[62503]: INFO nova.compute.manager [-] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Took 1.03 seconds to deallocate network for instance. [ 836.660655] env[62503]: DEBUG nova.compute.claims [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 836.660875] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.683464] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.683714] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.683874] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.688160] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.688382] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 
tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.688545] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.688767] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.688953] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.689157] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.689326] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.689500] env[62503]: DEBUG nova.virt.hardware [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.690504] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851394de-6bce-4dc7-a77e-083c3170df99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.701605] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f86b78-ba18-40ac-8d27-998bd4b4bb06 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.813199] env[62503]: ERROR nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. 
[ 836.813199] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 836.813199] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.813199] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.813199] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.813199] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.813199] env[62503]: ERROR nova.compute.manager raise self.value [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.813199] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 836.813199] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.813199] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 836.813672] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.813672] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 836.813672] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. 
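The traceback above bottoms out in _ensure_no_port_binding_failure() in nova/network/neutron.py. A minimal, self-contained sketch of that check follows, assuming only that Neutron returns a port whose binding:vif_type is 'binding_failed' when the binding could not be completed; the local exception class and the sample port dict are illustrative and not the actual Nova source.

VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding on the port's binding:vif_type field;
    # the compute side only has to inspect the port dict it got back.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example: a port dict as it might look after a failed bind.
port = {'id': 'bf3af8e0-63c1-45af-8f69-da97b8b24149',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)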
[ 836.813672] env[62503]: ERROR nova.compute.manager [ 836.813672] env[62503]: Traceback (most recent call last): [ 836.813672] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 836.813672] env[62503]: listener.cb(fileno) [ 836.813672] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.813672] env[62503]: result = function(*args, **kwargs) [ 836.813672] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.813672] env[62503]: return func(*args, **kwargs) [ 836.813672] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 836.813672] env[62503]: raise e [ 836.813672] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 836.813672] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 836.813672] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.813672] env[62503]: created_port_ids = self._update_ports_for_instance( [ 836.813672] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.813672] env[62503]: with excutils.save_and_reraise_exception(): [ 836.813672] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.813672] env[62503]: self.force_reraise() [ 836.813672] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.813672] env[62503]: raise self.value [ 836.813672] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.813672] env[62503]: updated_port = self._update_port( [ 836.813672] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.813672] env[62503]: _ensure_no_port_binding_failure(port) [ 836.813672] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.813672] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 836.814420] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. [ 836.814420] env[62503]: Removing descriptor: 14 [ 836.814420] env[62503]: ERROR nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. 
[ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Traceback (most recent call last): [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] yield resources [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.driver.spawn(context, instance, image_meta, [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.814420] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] vm_ref = self.build_virtual_machine(instance, [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] for vif in network_info: [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self._sync_wrapper(fn, *args, **kwargs) [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.wait() [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self[:] = self._gt.wait() [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self._exit_event.wait() [ 836.814752] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 836.815118] env[62503]: ERROR 
nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] result = hub.switch() [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self.greenlet.switch() [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] result = function(*args, **kwargs) [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return func(*args, **kwargs) [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise e [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] nwinfo = self.network_api.allocate_for_instance( [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.815118] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] created_port_ids = self._update_ports_for_instance( [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] with excutils.save_and_reraise_exception(): [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.force_reraise() [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise self.value [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] updated_port = self._update_port( [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.815466] 
env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] _ensure_no_port_binding_failure(port) [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.815466] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise exception.PortBindingFailed(port_id=port['id']) [ 836.815800] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. [ 836.815800] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] [ 836.815800] env[62503]: INFO nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Terminating instance [ 836.816939] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquiring lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.817136] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquired lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.817307] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.015844] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.097084] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.336723] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.429714] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.457071] env[62503]: DEBUG nova.compute.manager [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Received event network-changed-7de7e64b-635a-4946-9db0-68ebc5ffbf63 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 837.457071] env[62503]: DEBUG nova.compute.manager [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Refreshing instance network info cache due to event network-changed-7de7e64b-635a-4946-9db0-68ebc5ffbf63. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 837.457071] env[62503]: DEBUG oslo_concurrency.lockutils [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] Acquiring lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.599822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Releasing lock "refresh_cache-4cb117e3-ff57-4e7f-bb2b-a12c988e362c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.600099] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 837.600293] env[62503]: DEBUG nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 837.600464] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.614721] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.706455] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8676162a-7b26-4b8e-aa6b-22aaccd30b69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.713968] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faca3e88-4990-41f9-8ed4-50b362257949 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.743275] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273a98fa-c75f-472f-b714-b9cb6e5048a7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.750595] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a520b1b-a170-459f-bca9-1aaf46d31e99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.763692] env[62503]: DEBUG nova.compute.provider_tree [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.931665] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.931929] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.932263] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Releasing lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.932642] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 837.932844] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.933161] env[62503]: DEBUG oslo_concurrency.lockutils [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] Acquired lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.933330] env[62503]: DEBUG nova.network.neutron [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Refreshing network info cache for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.934384] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-920313b0-b893-4b08-b6d6-d125697b2cd4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.944809] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c207338-c98c-4a01-a0f8-db842ff1e268 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.966242] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2 could not be found. [ 837.966452] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.966632] env[62503]: INFO nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 837.966876] env[62503]: DEBUG oslo.service.loopingcall [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.967095] env[62503]: DEBUG nova.compute.manager [-] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 837.967190] env[62503]: DEBUG nova.network.neutron [-] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.981916] env[62503]: DEBUG nova.network.neutron [-] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.117567] env[62503]: DEBUG nova.network.neutron [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.267113] env[62503]: DEBUG nova.scheduler.client.report [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 838.484397] env[62503]: DEBUG nova.network.neutron [-] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.525137] env[62503]: DEBUG nova.network.neutron [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.619884] env[62503]: INFO nova.compute.manager [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] [instance: 4cb117e3-ff57-4e7f-bb2b-a12c988e362c] Took 1.02 seconds to deallocate network for instance. 
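The inventory reported above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 is what bounds how much the scheduler can place on this node. A short sketch of the arithmetic, assuming the usual Placement rule that usable capacity is (total - reserved) * allocation_ratio and that max_unit caps what a single allocation may take; the numbers are copied from the log record.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 176},
}

for rc, inv in inventory.items():
    # Total schedulable capacity for this resource class on the provider.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-instance cap={inv['max_unit']}")

# Expected output with the values above:
#   VCPU: capacity=192, per-instance cap=16
#   MEMORY_MB: capacity=196078, per-instance cap=65530
#   DISK_GB: capacity=400, per-instance cap=176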
[ 838.631376] env[62503]: DEBUG nova.network.neutron [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.772086] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.772661] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 838.775295] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.269s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.776652] env[62503]: INFO nova.compute.claims [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.987126] env[62503]: INFO nova.compute.manager [-] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Took 1.02 seconds to deallocate network for instance. 
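The "acquired by ... waited" / "released by ... held" pairs around the "compute_resources" lock come from oslo.concurrency's lock wrapper. A small sketch of the same pattern, assuming only the public lockutils API; the function body and the sleep are placeholders.

import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs with the in-process 'compute_resources' lock held; in the log,
    # "waited" is the time spent blocked before entry and "held" is the
    # time spent inside the locked function.
    time.sleep(0.1)

instance_claim()

# Equivalent context-manager form of the same lock:
with lockutils.lock('compute_resources'):
    pass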
[ 838.989591] env[62503]: DEBUG nova.compute.claims [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 838.989827] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.134029] env[62503]: DEBUG oslo_concurrency.lockutils [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] Releasing lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.134283] env[62503]: DEBUG nova.compute.manager [req-700f974a-920c-48b6-9079-2cee16fcf055 req-17f2f373-64af-4220-af1b-b5fa00d8d11f service nova] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Received event network-vif-deleted-7de7e64b-635a-4946-9db0-68ebc5ffbf63 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 839.283109] env[62503]: DEBUG nova.compute.utils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 839.284528] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 839.284676] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.323681] env[62503]: DEBUG nova.policy [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 839.615022] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Successfully created port: 4791ae83-b652-4809-8446-60fd6b4220e0 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.662648] env[62503]: INFO nova.scheduler.client.report [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Deleted allocations for instance 4cb117e3-ff57-4e7f-bb2b-a12c988e362c [ 839.788530] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 839.967352] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6322f5a-7679-4a17-836e-361de8342553 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.976363] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13613267-3d02-4198-9fa4-5501204139f7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.007523] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dd4f5c-4d2b-4976-8998-cb2361db6f87 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.015012] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7462de88-5636-4cdd-b69a-0fb8374869ec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.028188] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.175848] env[62503]: DEBUG oslo_concurrency.lockutils [None req-47a18854-ca82-4c3a-9a6e-798e903be44b tempest-ServerRescueTestJSONUnderV235-862506334 tempest-ServerRescueTestJSONUnderV235-862506334-project-member] Lock "4cb117e3-ff57-4e7f-bb2b-a12c988e362c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.333s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.531330] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 840.545347] env[62503]: DEBUG nova.compute.manager [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Received event network-changed-4791ae83-b652-4809-8446-60fd6b4220e0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 840.545347] env[62503]: DEBUG nova.compute.manager [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Refreshing instance network info cache due to event network-changed-4791ae83-b652-4809-8446-60fd6b4220e0. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 840.545347] env[62503]: DEBUG oslo_concurrency.lockutils [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] Acquiring lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.545347] env[62503]: DEBUG oslo_concurrency.lockutils [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] Acquired lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.545347] env[62503]: DEBUG nova.network.neutron [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Refreshing network info cache for port 4791ae83-b652-4809-8446-60fd6b4220e0 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.678921] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 840.734654] env[62503]: ERROR nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. 
[ 840.734654] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 840.734654] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.734654] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.734654] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.734654] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.734654] env[62503]: ERROR nova.compute.manager raise self.value [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.734654] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 840.734654] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.734654] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 840.735049] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 840.735049] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 840.735049] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. 
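The force_reraise()/raise self.value frames in the traceback above come from oslo_utils.excutils.save_and_reraise_exception, which lets cleanup run inside an except block and then re-raises the original exception from __exit__. A minimal sketch, assuming only the public oslo.utils API; the failing operation and the cleanup helper are placeholders.

from oslo_utils import excutils

def _cleanup_created_ports():
    # Placeholder for the rollback work done before the original error
    # propagates (e.g. deleting ports that were already created).
    print('rolling back ports created so far')

def update_ports():
    try:
        raise RuntimeError('port update failed')
    except Exception:
        with excutils.save_and_reraise_exception() as ctxt:
            # Runs with the original exception saved; setting
            # ctxt.reraise = False here would suppress it instead.
            _cleanup_created_ports()

try:
    update_ports()
except RuntimeError as exc:
    print('re-raised:', exc)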
[ 840.735049] env[62503]: ERROR nova.compute.manager [ 840.735049] env[62503]: Traceback (most recent call last): [ 840.735049] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 840.735049] env[62503]: listener.cb(fileno) [ 840.735049] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.735049] env[62503]: result = function(*args, **kwargs) [ 840.735049] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 840.735049] env[62503]: return func(*args, **kwargs) [ 840.735049] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 840.735049] env[62503]: raise e [ 840.735049] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 840.735049] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 840.735049] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.735049] env[62503]: created_port_ids = self._update_ports_for_instance( [ 840.735049] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.735049] env[62503]: with excutils.save_and_reraise_exception(): [ 840.735049] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.735049] env[62503]: self.force_reraise() [ 840.735049] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.735049] env[62503]: raise self.value [ 840.735049] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.735049] env[62503]: updated_port = self._update_port( [ 840.735049] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.735049] env[62503]: _ensure_no_port_binding_failure(port) [ 840.735049] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 840.735049] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 840.735705] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. [ 840.735705] env[62503]: Removing descriptor: 14 [ 840.798814] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 840.826024] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.826024] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.826024] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.826206] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.826564] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.826919] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.827334] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.827647] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.828086] env[62503]: DEBUG nova.virt.hardware [None 
req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.828255] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.828554] env[62503]: DEBUG nova.virt.hardware [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.830033] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0961f7-ef78-4329-a874-e5b773465c71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.837583] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2032759a-e2c3-49ab-89bb-f2c240a8ff11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.854802] env[62503]: ERROR nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. 
[ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Traceback (most recent call last): [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] yield resources [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.driver.spawn(context, instance, image_meta, [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] vm_ref = self.build_virtual_machine(instance, [ 840.854802] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] for vif in network_info: [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return self._sync_wrapper(fn, *args, **kwargs) [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.wait() [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self[:] = self._gt.wait() [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return self._exit_event.wait() [ 840.855157] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 840.855157] env[62503]: ERROR 
nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] current.throw(*self._exc) [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] result = function(*args, **kwargs) [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return func(*args, **kwargs) [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise e [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] nwinfo = self.network_api.allocate_for_instance( [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] created_port_ids = self._update_ports_for_instance( [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] with excutils.save_and_reraise_exception(): [ 840.855468] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.force_reraise() [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise self.value [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] updated_port = self._update_port( [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] _ensure_no_port_binding_failure(port) [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise exception.PortBindingFailed(port_id=port['id']) [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. [ 840.857484] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] [ 840.857484] env[62503]: INFO nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Terminating instance [ 840.857786] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.037449] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.037832] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 841.040512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.371s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.096109] env[62503]: DEBUG nova.network.neutron [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.198563] env[62503]: DEBUG nova.network.neutron [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.209574] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.546308] env[62503]: DEBUG nova.compute.utils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.552023] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 841.552023] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.593246] env[62503]: DEBUG nova.policy [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '210076ba585c445caab831b688e553e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03165cd95bb44de0b55f2d1fe0ac1a49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 841.701152] env[62503]: DEBUG oslo_concurrency.lockutils [req-19e06da5-e38e-4c02-8eb8-c9d98c325f37 req-7c2897f4-66c9-4b28-a926-acc07c3f1901 service nova] Releasing lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.701698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.701863] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 
c5f2cc73-6bcd-4422-890b-3299d4cf4534] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.756104] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227ef4a7-7cae-4061-9c72-99fd35f8e9e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.765165] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eb6fe7-2876-44e5-9c2f-137375ab0826 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.798576] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdfe92e-7b49-4d0d-af61-97eb7199231e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.806351] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67653a32-5496-420f-be61-7444145d9ea2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.822018] env[62503]: DEBUG nova.compute.provider_tree [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.906677] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Successfully created port: 630e253e-07e4-4fde-87cb-26c06ff7dd5e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.054264] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 842.229095] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.309530] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.324385] env[62503]: DEBUG nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 842.569409] env[62503]: DEBUG nova.compute.manager [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Received event network-vif-deleted-4791ae83-b652-4809-8446-60fd6b4220e0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 842.569701] env[62503]: DEBUG nova.compute.manager [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Received event network-changed-630e253e-07e4-4fde-87cb-26c06ff7dd5e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 842.569809] env[62503]: DEBUG nova.compute.manager [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Refreshing instance network info cache due to event network-changed-630e253e-07e4-4fde-87cb-26c06ff7dd5e. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 842.570429] env[62503]: DEBUG oslo_concurrency.lockutils [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] Acquiring lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.570576] env[62503]: DEBUG oslo_concurrency.lockutils [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] Acquired lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.570739] env[62503]: DEBUG nova.network.neutron [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Refreshing network info cache for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.762041] env[62503]: ERROR nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 842.762041] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 842.762041] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 842.762041] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 842.762041] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.762041] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.762041] env[62503]: ERROR nova.compute.manager raise self.value [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 842.762041] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 842.762041] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.762041] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 842.762498] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.762498] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 842.762498] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 842.762498] env[62503]: ERROR nova.compute.manager [ 842.762498] env[62503]: Traceback (most recent call last): [ 842.762498] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 842.762498] env[62503]: listener.cb(fileno) [ 842.762498] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 842.762498] env[62503]: result = function(*args, **kwargs) [ 842.762498] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 842.762498] env[62503]: return func(*args, **kwargs) [ 842.762498] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 842.762498] env[62503]: raise e [ 842.762498] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 842.762498] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 842.762498] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 842.762498] env[62503]: created_port_ids = self._update_ports_for_instance( [ 842.762498] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 842.762498] env[62503]: with excutils.save_and_reraise_exception(): [ 842.762498] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.762498] env[62503]: self.force_reraise() [ 842.762498] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.762498] env[62503]: raise self.value [ 842.762498] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 842.762498] env[62503]: updated_port = self._update_port( [ 842.762498] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.762498] env[62503]: _ensure_no_port_binding_failure(port) [ 842.762498] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.762498] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 842.763230] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 842.763230] env[62503]: Removing descriptor: 14 [ 842.813103] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.813532] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 842.813722] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.814033] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51723943-d88b-4e52-9a9e-9121760b78bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.823199] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e929ad25-7753-427c-adea-36d0cda62448 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.833458] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.793s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.834048] env[62503]: ERROR nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. 
[ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Traceback (most recent call last): [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.driver.spawn(context, instance, image_meta, [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] vm_ref = self.build_virtual_machine(instance, [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 842.834048] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] for vif in network_info: [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return self._sync_wrapper(fn, *args, **kwargs) [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.wait() [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self[:] = self._gt.wait() [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return self._exit_event.wait() [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] current.throw(*self._exc) [ 842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
842.834381] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] result = function(*args, **kwargs) [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] return func(*args, **kwargs) [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise e [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] nwinfo = self.network_api.allocate_for_instance( [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] created_port_ids = self._update_ports_for_instance( [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] with excutils.save_and_reraise_exception(): [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] self.force_reraise() [ 842.834851] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise self.value [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] updated_port = self._update_port( [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] _ensure_no_port_binding_failure(port) [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] raise exception.PortBindingFailed(port_id=port['id']) [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] nova.exception.PortBindingFailed: Binding failed for 
port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. [ 842.835230] env[62503]: ERROR nova.compute.manager [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] [ 842.835230] env[62503]: DEBUG nova.compute.utils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 842.836132] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.726s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.836343] env[62503]: DEBUG nova.objects.instance [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lazy-loading 'resources' on Instance uuid ef92e4ba-4ef3-4e26-9577-bad0c046ed47 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.837953] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Build of instance 09688e22-9225-4619-a9aa-eddb332cb8ab was re-scheduled: Binding failed for port 016961c8-b433-418d-a1a0-981912785d4c, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 842.838321] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 842.838540] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquiring lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.838687] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Acquired lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.838845] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.850041] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c5f2cc73-6bcd-4422-890b-3299d4cf4534 could not be found. [ 842.850252] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.850425] env[62503]: INFO nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Took 0.04 seconds to destroy the instance on the hypervisor. [ 842.850659] env[62503]: DEBUG oslo.service.loopingcall [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.850872] env[62503]: DEBUG nova.compute.manager [-] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 842.850963] env[62503]: DEBUG nova.network.neutron [-] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 842.867414] env[62503]: DEBUG nova.network.neutron [-] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.064025] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 843.093405] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.093658] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.093810] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.093996] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.094181] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.094344] env[62503]: DEBUG nova.virt.hardware [None 
req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.094553] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.094714] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.094881] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.095053] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.095276] env[62503]: DEBUG nova.virt.hardware [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.096573] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c3b3e6-8418-47a1-8e6c-a4b6e70be70f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.100511] env[62503]: DEBUG nova.network.neutron [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.108907] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9898185d-e629-4157-87f4-d0d4f94c0e17 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.124680] env[62503]: ERROR nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. 
[ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Traceback (most recent call last): [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] yield resources [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.driver.spawn(context, instance, image_meta, [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] vm_ref = self.build_virtual_machine(instance, [ 843.124680] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] vif_infos = vmwarevif.get_vif_info(self._session, [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] for vif in network_info: [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return self._sync_wrapper(fn, *args, **kwargs) [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.wait() [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self[:] = self._gt.wait() [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return self._exit_event.wait() [ 843.125390] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 843.125390] env[62503]: ERROR 
nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] current.throw(*self._exc) [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] result = function(*args, **kwargs) [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return func(*args, **kwargs) [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise e [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] nwinfo = self.network_api.allocate_for_instance( [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] created_port_ids = self._update_ports_for_instance( [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] with excutils.save_and_reraise_exception(): [ 843.125959] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.force_reraise() [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise self.value [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] updated_port = self._update_port( [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] _ensure_no_port_binding_failure(port) [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise exception.PortBindingFailed(port_id=port['id']) [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 843.126564] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] [ 843.126564] env[62503]: INFO nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Terminating instance [ 843.127787] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquiring lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.188489] env[62503]: DEBUG nova.network.neutron [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.359358] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.369808] env[62503]: DEBUG nova.network.neutron [-] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.449636] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.506166] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47802ee6-4114-4392-ab46-6e5c99d53b8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.514013] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6454506-8a43-48b4-a255-68de0c82bea5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.543192] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ed5a79-c46c-4ab1-8e98-3dd9587b7107 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.549741] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6670be3-a3c6-4cdd-9fc8-3b29f827cb84 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.562308] env[62503]: DEBUG nova.compute.provider_tree [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.692027] env[62503]: DEBUG oslo_concurrency.lockutils [req-371441ea-603b-472f-8c89-644254c7ce97 req-3d985203-6a1c-4fc0-8693-f0b2740e2098 service nova] Releasing lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.692027] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquired lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.692027] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.873219] env[62503]: INFO nova.compute.manager [-] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Took 1.02 seconds to deallocate network for instance. 
[ 843.875531] env[62503]: DEBUG nova.compute.claims [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 843.875706] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.953624] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Releasing lock "refresh_cache-09688e22-9225-4619-a9aa-eddb332cb8ab" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.953864] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 843.954060] env[62503]: DEBUG nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 843.954236] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.969105] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.065398] env[62503]: DEBUG nova.scheduler.client.report [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 844.208738] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.275565] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.472119] env[62503]: DEBUG nova.network.neutron [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.571823] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.574093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.665s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.575635] env[62503]: INFO nova.compute.claims [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.589550] env[62503]: INFO nova.scheduler.client.report [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Deleted allocations for instance ef92e4ba-4ef3-4e26-9577-bad0c046ed47 [ 844.594727] env[62503]: DEBUG nova.compute.manager 
[req-9d4ec3a9-b90b-4fa8-b309-b4a51f228a63 req-5509b2d3-5780-41b2-b380-eb38e8b9d8bd service nova] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Received event network-vif-deleted-630e253e-07e4-4fde-87cb-26c06ff7dd5e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 844.778188] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Releasing lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.778600] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 844.778796] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.779112] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8065600d-a2a5-4bb6-a0cd-d0b75abcccf8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.789177] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e86c00-1e11-4227-8589-e875185e98a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.810752] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e355e38-60c6-4e7f-beb4-160c4527ec51 could not be found. [ 844.811022] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.811244] env[62503]: INFO nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Took 0.03 seconds to destroy the instance on the hypervisor. [ 844.811489] env[62503]: DEBUG oslo.service.loopingcall [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.811714] env[62503]: DEBUG nova.compute.manager [-] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 844.811807] env[62503]: DEBUG nova.network.neutron [-] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.831938] env[62503]: DEBUG nova.network.neutron [-] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.975308] env[62503]: INFO nova.compute.manager [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] [instance: 09688e22-9225-4619-a9aa-eddb332cb8ab] Took 1.02 seconds to deallocate network for instance. [ 845.099338] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d7f8f25-2959-4048-b880-cb44f0eba2e8 tempest-ServersAaction247Test-815974925 tempest-ServersAaction247Test-815974925-project-member] Lock "ef92e4ba-4ef3-4e26-9577-bad0c046ed47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.678s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.334857] env[62503]: DEBUG nova.network.neutron [-] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.731558] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70b5f17-1aba-469d-a8d1-816cfce3af9a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.741070] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ec8e16-2ec5-4a4b-a546-4b0af4990988 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.771616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef76b8a-18a4-4873-a963-e5e08326c6aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.778963] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8323da77-a33f-4a29-add2-14f4a6c74a47 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.791841] env[62503]: DEBUG nova.compute.provider_tree [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.836862] env[62503]: INFO nova.compute.manager [-] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Took 1.02 seconds to deallocate network for instance. 
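The inventory payload reported above for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 lists total, reserved and allocation_ratio per resource class. Assuming the usual Placement-style derivation usable = (total - reserved) * allocation_ratio, the short snippet below recomputes effective capacity from the exact values in the log (illustrative only):

# Inventory as logged by the scheduler report client above,
# trimmed to the fields the formula needs.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0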
[ 845.838979] env[62503]: DEBUG nova.compute.claims [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 845.839170] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.003156] env[62503]: INFO nova.scheduler.client.report [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Deleted allocations for instance 09688e22-9225-4619-a9aa-eddb332cb8ab [ 846.295242] env[62503]: DEBUG nova.scheduler.client.report [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 846.511610] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0f7e3892-329e-443e-a8e9-e702f02c6757 tempest-AttachVolumeShelveTestJSON-1963914807 tempest-AttachVolumeShelveTestJSON-1963914807-project-member] Lock "09688e22-9225-4619-a9aa-eddb332cb8ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.869s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.800913] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.801081] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 846.803859] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.701s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.804139] env[62503]: DEBUG nova.objects.instance [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 847.014184] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 847.308590] env[62503]: DEBUG nova.compute.utils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 847.312952] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 847.313139] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.352830] env[62503]: DEBUG nova.policy [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c41319365a5412b9bf7480a7edba4bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd69e3630f8144c288f8685c2201779ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 847.540786] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.628023] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Successfully created port: 595f989b-b0f0-4115-a28f-c984f19ae80d {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.814332] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 847.817998] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5855f130-1d8c-4319-bf40-572a18bbb2a5 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.819086] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.263s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.456378] env[62503]: DEBUG nova.compute.manager [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Received event network-changed-595f989b-b0f0-4115-a28f-c984f19ae80d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 848.456616] env[62503]: DEBUG nova.compute.manager [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Refreshing instance network info cache due to event network-changed-595f989b-b0f0-4115-a28f-c984f19ae80d. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 848.457239] env[62503]: DEBUG oslo_concurrency.lockutils [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] Acquiring lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.457239] env[62503]: DEBUG oslo_concurrency.lockutils [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] Acquired lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.457239] env[62503]: DEBUG nova.network.neutron [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Refreshing network info cache for port 595f989b-b0f0-4115-a28f-c984f19ae80d {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.507350] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351eda26-ef48-4786-84b0-2126586612f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.516045] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e97b329-86bf-482d-b33e-dcafdf106ec8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.546026] env[62503]: ERROR nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 
595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. [ 848.546026] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 848.546026] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 848.546026] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 848.546026] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.546026] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.546026] env[62503]: ERROR nova.compute.manager raise self.value [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 848.546026] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 848.546026] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.546026] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 848.546607] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.546607] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 848.546607] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. 
[ 848.546607] env[62503]: ERROR nova.compute.manager [ 848.546607] env[62503]: Traceback (most recent call last): [ 848.546607] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 848.546607] env[62503]: listener.cb(fileno) [ 848.546607] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 848.546607] env[62503]: result = function(*args, **kwargs) [ 848.546607] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 848.546607] env[62503]: return func(*args, **kwargs) [ 848.546607] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 848.546607] env[62503]: raise e [ 848.546607] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 848.546607] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 848.546607] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 848.546607] env[62503]: created_port_ids = self._update_ports_for_instance( [ 848.546607] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 848.546607] env[62503]: with excutils.save_and_reraise_exception(): [ 848.546607] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.546607] env[62503]: self.force_reraise() [ 848.546607] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.546607] env[62503]: raise self.value [ 848.546607] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 848.546607] env[62503]: updated_port = self._update_port( [ 848.546607] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.546607] env[62503]: _ensure_no_port_binding_failure(port) [ 848.546607] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.546607] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 848.548077] env[62503]: nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. 
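Both PortBindingFailed tracebacks above terminate in _ensure_no_port_binding_failure in nova/network/neutron.py. The following is a hedged reconstruction of that guard, assuming Neutron flags a failed binding via binding:vif_type == 'binding_failed'; it is consistent with the traceback but not copied from the source.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message wording taken
    # from the log above.
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a failed binding on the port dict with
    # binding:vif_type == 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: the port from the traceback above would trip the guard.
# ensure_no_port_binding_failure({'id': '595f989b-b0f0-4115-a28f-c984f19ae80d',
#                                 'binding:vif_type': 'binding_failed'})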
[ 848.548077] env[62503]: Removing descriptor: 14 [ 848.548077] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec92f44-74b0-4348-ad9a-30e88dcacfdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.553134] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a506980a-3c3e-4469-af68-67c15cdbe46d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.567278] env[62503]: DEBUG nova.compute.provider_tree [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.829176] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 848.857977] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.858295] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.858495] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.858718] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 848.858896] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 
tempest-AttachInterfacesTestJSON-1175081483-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.859092] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.859336] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.859568] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.859854] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.860072] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.860287] env[62503]: DEBUG nova.virt.hardware [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.861183] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f758cab0-fadd-4bd3-a79e-e2419beb2bfa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.869083] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9833871-53c0-420d-9e8f-667cb2821d2b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.882874] env[62503]: ERROR nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. 
[ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Traceback (most recent call last): [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] yield resources [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.driver.spawn(context, instance, image_meta, [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] vm_ref = self.build_virtual_machine(instance, [ 848.882874] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] for vif in network_info: [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return self._sync_wrapper(fn, *args, **kwargs) [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.wait() [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self[:] = self._gt.wait() [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return self._exit_event.wait() [ 848.883358] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 848.883358] env[62503]: ERROR 
nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] current.throw(*self._exc) [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] result = function(*args, **kwargs) [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return func(*args, **kwargs) [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise e [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] nwinfo = self.network_api.allocate_for_instance( [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] created_port_ids = self._update_ports_for_instance( [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] with excutils.save_and_reraise_exception(): [ 848.883726] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.force_reraise() [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise self.value [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] updated_port = self._update_port( [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] _ensure_no_port_binding_failure(port) [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise exception.PortBindingFailed(port_id=port['id']) [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. [ 848.884084] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] [ 848.884084] env[62503]: INFO nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Terminating instance [ 848.885624] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.983742] env[62503]: DEBUG nova.network.neutron [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.067492] env[62503]: DEBUG nova.network.neutron [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.070751] env[62503]: DEBUG nova.scheduler.client.report [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 849.570170] env[62503]: DEBUG oslo_concurrency.lockutils [req-d18a663c-c11d-42ec-aa12-03481cea3814 req-caece822-90af-422a-be39-fddff7e223c3 service nova] Releasing lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.570688] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.570885] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Building 
network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.573914] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.755s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.574438] env[62503]: ERROR nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Traceback (most recent call last): [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.driver.spawn(context, instance, image_meta, [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] vm_ref = self.build_virtual_machine(instance, [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 849.574438] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] for vif in network_info: [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return self._sync_wrapper(fn, *args, **kwargs) [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.wait() [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 849.574758] 
env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self[:] = self._gt.wait() [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return self._exit_event.wait() [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] current.throw(*self._exc) [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.574758] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] result = function(*args, **kwargs) [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] return func(*args, **kwargs) [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise e [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] nwinfo = self.network_api.allocate_for_instance( [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] created_port_ids = self._update_ports_for_instance( [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] with excutils.save_and_reraise_exception(): [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] self.force_reraise() [ 849.575069] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise self.value [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] updated_port = self._update_port( [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] _ensure_no_port_binding_failure(port) [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] raise exception.PortBindingFailed(port_id=port['id']) [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] nova.exception.PortBindingFailed: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. [ 849.575581] env[62503]: ERROR nova.compute.manager [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] [ 849.575581] env[62503]: DEBUG nova.compute.utils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 849.576275] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.666s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.577658] env[62503]: INFO nova.compute.claims [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.580771] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Build of instance b1fc7438-2078-435a-9754-19a8a1bc6f5c was re-scheduled: Binding failed for port 13ea3744-a93f-474f-bacd-9c28ddac981a, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 849.581238] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 849.581467] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquiring lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.581614] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Acquired lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.581777] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.093052] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.101078] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.185422] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.237602] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.485326] env[62503]: DEBUG nova.compute.manager [req-cb4a32ab-637b-4810-9937-0453fbbd940b req-a69e2fec-2e94-448a-bc69-33505cc98066 service nova] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Received event network-vif-deleted-595f989b-b0f0-4115-a28f-c984f19ae80d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 850.688148] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Releasing lock "refresh_cache-b1fc7438-2078-435a-9754-19a8a1bc6f5c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.688762] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 850.688762] env[62503]: DEBUG nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 850.688762] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.740681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.741202] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 850.741453] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.741815] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4143c1cf-cf6d-4d61-bd7b-6cebe62ef3fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.756020] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f6d386-5806-47d2-90d8-cb81b7b3beb2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.771513] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7ba170-4647-4464-a693-7ca1c38fbccc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.781658] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc070b91-e60d-4f4a-abaa-8a5c969273e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.790448] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04b9ed30-2cd0-4c07-9141-76f0f53fefb4 could not be found. [ 850.790594] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.790782] env[62503]: INFO nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 850.791126] env[62503]: DEBUG oslo.service.loopingcall [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.791723] env[62503]: DEBUG nova.compute.manager [-] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 850.791823] env[62503]: DEBUG nova.network.neutron [-] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.818211] env[62503]: DEBUG nova.network.neutron [-] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.819836] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a6883a-90e3-4750-8121-80408780e221 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.828085] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0593a7f2-6939-43f5-ac31-f21f6ac158cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.841858] env[62503]: DEBUG nova.compute.provider_tree [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.874077] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.323278] env[62503]: DEBUG nova.network.neutron [-] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.345287] env[62503]: DEBUG nova.scheduler.client.report [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 851.377558] env[62503]: DEBUG nova.network.neutron [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.827565] env[62503]: INFO nova.compute.manager [-] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Took 1.04 seconds to deallocate network for instance. [ 851.830756] env[62503]: DEBUG nova.compute.claims [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 851.830961] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.855027] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.855541] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 851.858197] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.088s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.859968] env[62503]: INFO nova.compute.claims [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.880628] env[62503]: INFO nova.compute.manager [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] [instance: b1fc7438-2078-435a-9754-19a8a1bc6f5c] Took 1.19 seconds to deallocate network for instance. [ 852.270242] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.270473] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.365064] env[62503]: DEBUG nova.compute.utils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.370195] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 852.370195] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.413016] env[62503]: DEBUG nova.policy [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.710997] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Successfully created port: cfe47a55-23d4-4918-ad96-c1a32a7fc784 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.779226] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.779226] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 852.779226] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 852.847629] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.847629] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.868105] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 852.915327] env[62503]: INFO nova.scheduler.client.report [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Deleted allocations for instance b1fc7438-2078-435a-9754-19a8a1bc6f5c [ 853.067520] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4ab358-650b-445e-8cc9-2de26c1f9569 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.075081] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096217cc-fec3-4b79-87ec-22a9c823203e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.105213] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead59b85-fd64-459f-b66c-b79a510db4a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.112284] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f47cfdf-bc3a-44a9-af97-42ad0082143f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.124739] env[62503]: DEBUG nova.compute.provider_tree [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.283111] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.283195] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.283306] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.283432] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.283556] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Skipping network cache update for instance because it is Building. 
{{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.283676] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.284916] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 853.302165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.302165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquired lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.302165] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Forcefully refreshing network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 853.302165] env[62503]: DEBUG nova.objects.instance [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lazy-loading 'info_cache' on Instance uuid 86422990-4215-4628-a7a7-4fdc910e304e {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.431046] env[62503]: DEBUG oslo_concurrency.lockutils [None req-aaad013a-a4dc-432c-87d1-a452b2fca374 tempest-ServerPasswordTestJSON-330364349 tempest-ServerPasswordTestJSON-330364349-project-member] Lock "b1fc7438-2078-435a-9754-19a8a1bc6f5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.870s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.462982] env[62503]: DEBUG nova.compute.manager [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Received event network-changed-cfe47a55-23d4-4918-ad96-c1a32a7fc784 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 853.462982] env[62503]: DEBUG nova.compute.manager [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Refreshing instance network info cache due to event network-changed-cfe47a55-23d4-4918-ad96-c1a32a7fc784. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 853.462982] env[62503]: DEBUG oslo_concurrency.lockutils [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] Acquiring lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.462982] env[62503]: DEBUG oslo_concurrency.lockutils [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] Acquired lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.462982] env[62503]: DEBUG nova.network.neutron [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Refreshing network info cache for port cfe47a55-23d4-4918-ad96-c1a32a7fc784 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.629679] env[62503]: DEBUG nova.scheduler.client.report [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 853.653451] env[62503]: ERROR nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. 
[ 853.653451] env[62503]: ERROR nova.compute.manager Traceback (most recent call last): [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 853.653451] env[62503]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 853.653451] env[62503]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 853.653451] env[62503]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.653451] env[62503]: ERROR nova.compute.manager self.force_reraise() [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.653451] env[62503]: ERROR nova.compute.manager raise self.value [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 853.653451] env[62503]: ERROR nova.compute.manager updated_port = self._update_port( [ 853.653451] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.653451] env[62503]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 853.654126] env[62503]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.654126] env[62503]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 853.654126] env[62503]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. 
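The traceback above bottoms out in nova/network/neutron.py: after Neutron returns the updated port, _update_port calls _ensure_no_port_binding_failure, which raises PortBindingFailed when the port's binding did not succeed on the Neutron side, and the build is aborted (and later re-scheduled) instead of spawning with an unusable VIF. The following is a minimal standalone sketch of that guard, reconstructed from the call chain in the log rather than copied from Nova; the check on the Neutron 'binding:vif_type' attribute and the 'binding_failed' sentinel value are assumptions based on the Neutron port API.

    # Standalone sketch of the guard shown at the bottom of the traceback above.
    # Not Nova's actual code: the exception class and the binding check are
    # reconstructed from the log; 'binding_failed' is an assumed sentinel value.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def _update_port(port):
        # After refreshing a port, bail out early if its binding failed so the
        # caller can unwind the build instead of continuing with a dead VIF.
        _ensure_no_port_binding_failure(port)
        return port

    try:
        _update_port({'id': 'cfe47a55-23d4-4918-ad96-c1a32a7fc784',
                      'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message text as the ERROR records in this log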
[ 853.654126] env[62503]: ERROR nova.compute.manager [ 853.654126] env[62503]: Traceback (most recent call last): [ 853.654126] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 853.654126] env[62503]: listener.cb(fileno) [ 853.654126] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.654126] env[62503]: result = function(*args, **kwargs) [ 853.654126] env[62503]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 853.654126] env[62503]: return func(*args, **kwargs) [ 853.654126] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 853.654126] env[62503]: raise e [ 853.654126] env[62503]: File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 853.654126] env[62503]: nwinfo = self.network_api.allocate_for_instance( [ 853.654126] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 853.654126] env[62503]: created_port_ids = self._update_ports_for_instance( [ 853.654126] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 853.654126] env[62503]: with excutils.save_and_reraise_exception(): [ 853.654126] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.654126] env[62503]: self.force_reraise() [ 853.654126] env[62503]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.654126] env[62503]: raise self.value [ 853.654126] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 853.654126] env[62503]: updated_port = self._update_port( [ 853.654126] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.654126] env[62503]: _ensure_no_port_binding_failure(port) [ 853.654126] env[62503]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.654126] env[62503]: raise exception.PortBindingFailed(port_id=port['id']) [ 853.654793] env[62503]: nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. [ 853.654793] env[62503]: Removing descriptor: 14 [ 853.880067] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 853.912071] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.917128] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.917128] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.917128] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.917128] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.921018] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.921018] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.921018] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.921018] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.921018] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.921213] env[62503]: DEBUG nova.virt.hardware [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.921213] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eaf6ba-ea9b-48f0-9f5c-28ab34d9d1c2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.935263] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c64054-a06f-45ec-9ff8-cffbacdb08dd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.940304] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 853.955703] env[62503]: ERROR nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. 
[ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Traceback (most recent call last): [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 2897, in _build_resources [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] yield resources [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.driver.spawn(context, instance, image_meta, [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] vm_ref = self.build_virtual_machine(instance, [ 853.955703] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] for vif in network_info: [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return self._sync_wrapper(fn, *args, **kwargs) [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.wait() [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self[:] = self._gt.wait() [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return self._exit_event.wait() [ 853.956027] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.956027] env[62503]: ERROR 
nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] current.throw(*self._exc) [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] result = function(*args, **kwargs) [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return func(*args, **kwargs) [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise e [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] nwinfo = self.network_api.allocate_for_instance( [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] created_port_ids = self._update_ports_for_instance( [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] with excutils.save_and_reraise_exception(): [ 853.956381] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.force_reraise() [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise self.value [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] updated_port = self._update_port( [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] _ensure_no_port_binding_failure(port) [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise exception.PortBindingFailed(port_id=port['id']) [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. [ 853.957119] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] [ 853.957119] env[62503]: INFO nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Terminating instance [ 853.959408] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.984910] env[62503]: DEBUG nova.network.neutron [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.103103] env[62503]: DEBUG nova.network.neutron [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.136073] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.275s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.136073] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 854.136676] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.585s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.137138] env[62503]: DEBUG nova.objects.instance [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lazy-loading 'resources' on Instance uuid 86422990-4215-4628-a7a7-4fdc910e304e {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.331225] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.480838] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.606425] env[62503]: DEBUG oslo_concurrency.lockutils [req-ddc2f273-d3e1-43e9-88c7-6165aedb1957 req-84706001-fefe-43b4-a841-80197d261b31 service nova] Releasing lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.606913] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.607159] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.642263] env[62503]: DEBUG nova.compute.utils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.644281] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 854.646635] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 854.711847] env[62503]: DEBUG nova.policy [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '691809da402d4a29b085cfe3b22306b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a833cd3315d0487cb3badd7b0d330a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 854.814921] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8731a1-67a5-4d51-bfcc-4033d2ac2286 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.823021] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb100bee-29bb-4ab1-a141-d74205ab29f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.855204] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df174af3-a672-4200-b071-e517c5d38c0b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.862248] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408b73a1-0dcd-4e5d-9983-515e7d23aa76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.875961] env[62503]: DEBUG nova.compute.provider_tree [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.913035] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.128461] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.147038] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 855.163571] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Successfully created port: 992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.217304] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.380574] env[62503]: DEBUG nova.scheduler.client.report [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 855.415745] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Releasing lock "refresh_cache-86422990-4215-4628-a7a7-4fdc910e304e" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.415963] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Updated the network info_cache for instance {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10295}} [ 855.416178] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.416337] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.416485] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.416634] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.416776] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.416921] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.417066] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 855.417411] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.487347] env[62503]: DEBUG nova.compute.manager [req-7535db5d-23ec-454e-a441-cc4ffc3bf4b8 req-fdad77d6-a7eb-41d1-b857-1c1df15515f6 service nova] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Received event network-vif-deleted-cfe47a55-23d4-4918-ad96-c1a32a7fc784 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 855.721351] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.721778] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 855.721996] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.722311] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe12ed8c-e3f2-469c-a6e7-64711638042e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.731810] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d80c4c-b1bf-40a1-9508-2cc185497559 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.753619] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c990f365-97df-4203-bd8c-dab822b2d8c3 could not be found. 
[ 855.753875] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.754106] env[62503]: INFO nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 855.754381] env[62503]: DEBUG oslo.service.loopingcall [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.754842] env[62503]: DEBUG nova.compute.manager [-] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 855.754842] env[62503]: DEBUG nova.network.neutron [-] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.770184] env[62503]: DEBUG nova.network.neutron [-] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.889315] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.894116] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.231s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.918238] env[62503]: INFO nova.scheduler.client.report [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Deleted allocations for instance 86422990-4215-4628-a7a7-4fdc910e304e [ 855.922400] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.157960] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 856.183583] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.183830] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.183987] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.184184] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.184333] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.184480] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.184685] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.184909] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.185182] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Got 1 possible topologies 
{{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.185437] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.185658] env[62503]: DEBUG nova.virt.hardware [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.186604] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31036b0-ff40-4098-845b-e0c071fc33cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.194644] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede47e1b-de88-4f73-b14b-cea214c57112 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.273503] env[62503]: DEBUG nova.network.neutron [-] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.430130] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc7fb366-4910-454c-9b67-dd826f7f0468 tempest-ServerShowV257Test-1479545476 tempest-ServerShowV257Test-1479545476-project-member] Lock "86422990-4215-4628-a7a7-4fdc910e304e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.616s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.546438] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8f1c66-4e4f-42c5-a540-1da6941e8b41 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.557178] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3fdb79-dd26-47bf-9439-7a3819bc7662 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.590868] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfc1ba2-ae55-4611-80b7-970dc2144e7c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.598529] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860602a8-11df-418d-a794-9512fbb4deb3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.611699] env[62503]: DEBUG nova.compute.provider_tree [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.777934] env[62503]: INFO nova.compute.manager [-] [instance: 
c990f365-97df-4203-bd8c-dab822b2d8c3] Took 1.02 seconds to deallocate network for instance. [ 856.779354] env[62503]: DEBUG nova.compute.claims [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Aborting claim: {{(pid=62503) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 856.779354] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.115411] env[62503]: DEBUG nova.scheduler.client.report [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 857.162692] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Successfully updated port: 992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.519428] env[62503]: DEBUG nova.compute.manager [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Received event network-vif-plugged-992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 857.520792] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Acquiring lock "9ccdc727-536e-4db8-bad4-960858254758-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.522349] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Lock "9ccdc727-536e-4db8-bad4-960858254758-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.522349] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Lock "9ccdc727-536e-4db8-bad4-960858254758-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.522349] env[62503]: DEBUG 
nova.compute.manager [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] No waiting events found dispatching network-vif-plugged-992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.522349] env[62503]: WARNING nova.compute.manager [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Received unexpected event network-vif-plugged-992a63b9-4bca-47ba-abed-96804de62f70 for instance with vm_state building and task_state spawning. [ 857.522349] env[62503]: DEBUG nova.compute.manager [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Received event network-changed-992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 857.522620] env[62503]: DEBUG nova.compute.manager [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Refreshing instance network info cache due to event network-changed-992a63b9-4bca-47ba-abed-96804de62f70. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 857.522620] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Acquiring lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.522620] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Acquired lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.522769] env[62503]: DEBUG nova.network.neutron [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Refreshing network info cache for port 992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.621820] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.728s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.621820] env[62503]: ERROR nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. 
[ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Traceback (most recent call last): [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.driver.spawn(context, instance, image_meta, [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.621820] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] vm_ref = self.build_virtual_machine(instance, [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] for vif in network_info: [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self._sync_wrapper(fn, *args, **kwargs) [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.wait() [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self[:] = self._gt.wait() [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self._exit_event.wait() [ 857.622240] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] result = hub.switch() [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return self.greenlet.switch() [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] result = function(*args, **kwargs) [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] return func(*args, **kwargs) [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise e [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] nwinfo = self.network_api.allocate_for_instance( [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 857.622587] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] created_port_ids = self._update_ports_for_instance( [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] with excutils.save_and_reraise_exception(): [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] self.force_reraise() [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise self.value [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] updated_port = self._update_port( [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] _ensure_no_port_binding_failure(port) [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 857.622886] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] raise exception.PortBindingFailed(port_id=port['id']) [ 857.623188] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] nova.exception.PortBindingFailed: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. [ 857.623188] env[62503]: ERROR nova.compute.manager [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] [ 857.623188] env[62503]: DEBUG nova.compute.utils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.623188] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.633s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.625879] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Build of instance 4deb28e7-351b-41b7-90bb-afdde200f7fa was re-scheduled: Binding failed for port 3efea7b6-754e-4a5c-93d0-b78ff2e5133e, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 857.625879] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 857.626113] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.626184] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.627523] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.665095] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.056183] env[62503]: DEBUG nova.network.neutron [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.126832] env[62503]: DEBUG nova.network.neutron [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.145955] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.259617] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.269262] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a816a182-7e97-42cd-a240-f69d0ff7176a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.277729] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcc9dd2-56d7-4dfb-87ac-4e6b31fcdd1d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.306971] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce0d93a-dc9a-470a-9303-f121f2bbf93a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.313636] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f0466c-c17a-4cba-960c-5a043ef55f03 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.326202] env[62503]: DEBUG nova.compute.provider_tree [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.634125] env[62503]: DEBUG oslo_concurrency.lockutils [req-57a09cc0-d8e7-4c22-af14-19afde66e28e req-64cdce2b-20b5-4df2-99ab-f300e7bc3706 service nova] Releasing lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.634524] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.634685] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.762732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-4deb28e7-351b-41b7-90bb-afdde200f7fa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.762969] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 
tempest-ServerDiskConfigTestJSON-2078736902-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 858.763188] env[62503]: DEBUG nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 858.763358] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.777880] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.828778] env[62503]: DEBUG nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 859.164576] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.282543] env[62503]: DEBUG nova.network.neutron [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.334353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.710s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.334353] env[62503]: ERROR nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Traceback (most recent call last): [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.driver.spawn(context, instance, image_meta, [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 859.334353] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] vm_ref = self.build_virtual_machine(instance, [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] vif_infos = vmwarevif.get_vif_info(self._session, [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] for vif in network_info: [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self._sync_wrapper(fn, *args, **kwargs) [ 859.336535] env[62503]: ERROR 
nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.wait() [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self[:] = self._gt.wait() [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self._exit_event.wait() [ 859.336535] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] result = hub.switch() [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return self.greenlet.switch() [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] result = function(*args, **kwargs) [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] return func(*args, **kwargs) [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise e [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] nwinfo = self.network_api.allocate_for_instance( [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 859.347090] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] created_port_ids = self._update_ports_for_instance( [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] with excutils.save_and_reraise_exception(): [ 859.347499] env[62503]: ERROR nova.compute.manager 
[instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] self.force_reraise() [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise self.value [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] updated_port = self._update_port( [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] _ensure_no_port_binding_failure(port) [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.347499] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] raise exception.PortBindingFailed(port_id=port['id']) [ 859.347973] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] nova.exception.PortBindingFailed: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. [ 859.347973] env[62503]: ERROR nova.compute.manager [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] [ 859.347973] env[62503]: DEBUG nova.compute.utils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 859.347973] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Build of instance 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2 was re-scheduled: Binding failed for port 7de7e64b-635a-4946-9db0-68ebc5ffbf63, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 859.347973] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 859.348186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquiring lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.348186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Acquired lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.348186] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.348186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.131s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.348186] env[62503]: INFO nova.compute.claims [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.348338] env[62503]: DEBUG nova.network.neutron [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Updating instance_info_cache with network_info: [{"id": "992a63b9-4bca-47ba-abed-96804de62f70", "address": "fa:16:3e:a8:b1:2c", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", 
"external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap992a63b9-4b", "ovs_interfaceid": "992a63b9-4bca-47ba-abed-96804de62f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.789717] env[62503]: INFO nova.compute.manager [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 4deb28e7-351b-41b7-90bb-afdde200f7fa] Took 1.03 seconds to deallocate network for instance. [ 859.849670] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-9ccdc727-536e-4db8-bad4-960858254758" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.849942] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance network_info: |[{"id": "992a63b9-4bca-47ba-abed-96804de62f70", "address": "fa:16:3e:a8:b1:2c", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap992a63b9-4b", "ovs_interfaceid": "992a63b9-4bca-47ba-abed-96804de62f70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 859.850423] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:b1:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec3f9e71-839a-429d-b211-d3dfc98ca4f6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '992a63b9-4bca-47ba-abed-96804de62f70', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.858164] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating folder: Project (a833cd3315d0487cb3badd7b0d330a9a). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.861422] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8810d3a-fc4f-4bec-ac4b-67ddde452ce2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.864550] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.874538] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created folder: Project (a833cd3315d0487cb3badd7b0d330a9a) in parent group-v294540. [ 859.874725] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating folder: Instances. Parent ref: group-v294569. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.874944] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0501e46f-e768-4455-9480-5473cce80f5a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.885453] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created folder: Instances in parent group-v294569. [ 859.885453] env[62503]: DEBUG oslo.service.loopingcall [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.885583] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.885769] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6eee9722-4741-4265-83c4-1045d31c3e77 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.907444] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.907444] env[62503]: value = "task-1387789" [ 859.907444] env[62503]: _type = "Task" [ 859.907444] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.914584] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387789, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.938363] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.016835] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b2f209-ba2b-40cd-ba19-e46e70c91763 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.024685] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d1e140-e9ca-49ec-8083-64e566f483f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.055176] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327b026e-78ab-4c33-bfaa-3f05ebc3b49c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.062044] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af602fe-2a43-4ecf-9415-ad3190e064f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.074828] env[62503]: DEBUG nova.compute.provider_tree [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.417182] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387789, 'name': CreateVM_Task, 'duration_secs': 0.33981} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.417437] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.424637] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.424868] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.425210] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 860.425467] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8082f6bb-5c34-48e0-ad65-e07aee76a6aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.430108] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 860.430108] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ad4d77-960f-a222-1192-d0f475f154e6" [ 860.430108] env[62503]: _type = "Task" [ 860.430108] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.437669] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ad4d77-960f-a222-1192-d0f475f154e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.441253] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Releasing lock "refresh_cache-35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.441471] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 860.441650] env[62503]: DEBUG nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 860.441816] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.461827] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.578432] env[62503]: DEBUG nova.scheduler.client.report [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 860.820329] env[62503]: INFO nova.scheduler.client.report [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocations for instance 4deb28e7-351b-41b7-90bb-afdde200f7fa [ 860.940022] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ad4d77-960f-a222-1192-d0f475f154e6, 'name': SearchDatastore_Task, 'duration_secs': 0.011402} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.940342] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.940582] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.940817] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.940964] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.941164] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.941419] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b99e45e9-f309-4027-8742-22264547d315 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.948950] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.949200] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 860.949872] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03c20eef-ab42-41a6-abd4-1378106e96b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.954542] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 860.954542] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52666f13-6f96-4bbe-ee9e-a409aed06c27" [ 860.954542] env[62503]: _type = "Task" [ 860.954542] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.961805] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52666f13-6f96-4bbe-ee9e-a409aed06c27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.964298] env[62503]: DEBUG nova.network.neutron [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.083218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.743s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.083755] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 861.087844] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.211s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.327924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d66ce770-1029-4d34-900e-8c450c39e993 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "4deb28e7-351b-41b7-90bb-afdde200f7fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.292s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.465366] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52666f13-6f96-4bbe-ee9e-a409aed06c27, 'name': SearchDatastore_Task, 'duration_secs': 0.016048} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.466363] env[62503]: INFO nova.compute.manager [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] [instance: 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2] Took 1.02 seconds to deallocate network for instance. [ 861.468801] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1afddac7-ff78-4610-88a0-2e6b85f16c1a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.474327] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 861.474327] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528c9bcd-0631-c4d9-3c0f-c2b4f4296a70" [ 861.474327] env[62503]: _type = "Task" [ 861.474327] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.481814] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528c9bcd-0631-c4d9-3c0f-c2b4f4296a70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.592148] env[62503]: DEBUG nova.compute.utils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 861.597549] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 861.597990] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.641531] env[62503]: DEBUG nova.policy [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13fbe651215a435384443e46e225ebaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5f8c12d03a0446988d5335c00cee0ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 861.726947] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11066d5a-5427-40c5-a528-46c11e6fd669 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.735150] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3ae8ae-3650-435a-8d7a-9eb4784255ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.765431] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7353713c-88bf-4eec-8e1f-14ff71a9a11d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.773058] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb06097-d72a-47f3-8c57-cd056c5fc71a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.787582] env[62503]: DEBUG nova.compute.provider_tree [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.830633] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 
e693bcc2-3883-466d-913c-831146ca81e7] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 861.956811] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Successfully created port: dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.986541] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528c9bcd-0631-c4d9-3c0f-c2b4f4296a70, 'name': SearchDatastore_Task, 'duration_secs': 0.009564} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.986792] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.987183] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 9ccdc727-536e-4db8-bad4-960858254758/9ccdc727-536e-4db8-bad4-960858254758.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 861.987294] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d0524f5-3331-416b-affb-5f656fb77a3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.994822] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 861.994822] env[62503]: value = "task-1387790" [ 861.994822] env[62503]: _type = "Task" [ 861.994822] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.003468] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.098610] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 862.238426] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Successfully created port: e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.290945] env[62503]: DEBUG nova.scheduler.client.report [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 862.355298] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.507968] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387790, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.509153] env[62503]: INFO nova.scheduler.client.report [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Deleted allocations for instance 35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2 [ 862.797106] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.711s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.797951] env[62503]: ERROR nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. 
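
The PortBindingFailed error recorded above (and unwound in the traceback that follows) is raised when Neutron hands back a port whose binding:vif_type indicates the binding failed; the frame _ensure_no_port_binding_failure in the traceback performs that check. Below is a minimal, self-contained sketch of that style of check, not Nova's actual implementation: the constant, the exception class, and the sample port dict are illustrative only.

    # Simplified sketch of the binding-failure check named in the traceback below.
    # Names here are illustrative; they are not copied from Nova's source.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind by reporting a failed
        # binding:vif_type; spotting that turns a silently unusable port
        # into an exception instead of a broken VIF later in spawn().
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])

    # Example input shaped like the failed port in this log.
    try:
        ensure_no_port_binding_failure({
            'id': '4791ae83-b652-4809-8446-60fd6b4220e0',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED,
        })
    except PortBindingFailed as exc:
        print(exc)

When a check like this fires during _build_and_run_instance, the compute manager aborts the resource claim and re-schedules the build, which is the sequence the surrounding records show for instance c5f2cc73-6bcd-4422-890b-3299d4cf4534.
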
[ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Traceback (most recent call last): [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.driver.spawn(context, instance, image_meta, [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] vm_ref = self.build_virtual_machine(instance, [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] vif_infos = vmwarevif.get_vif_info(self._session, [ 862.797951] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] for vif in network_info: [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return self._sync_wrapper(fn, *args, **kwargs) [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.wait() [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self[:] = self._gt.wait() [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return self._exit_event.wait() [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] current.throw(*self._exc) [ 862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
862.798332] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] result = function(*args, **kwargs) [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] return func(*args, **kwargs) [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise e [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] nwinfo = self.network_api.allocate_for_instance( [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] created_port_ids = self._update_ports_for_instance( [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] with excutils.save_and_reraise_exception(): [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] self.force_reraise() [ 862.798667] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise self.value [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] updated_port = self._update_port( [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] _ensure_no_port_binding_failure(port) [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] raise exception.PortBindingFailed(port_id=port['id']) [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] nova.exception.PortBindingFailed: Binding failed for 
port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. [ 862.799051] env[62503]: ERROR nova.compute.manager [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] [ 862.799051] env[62503]: DEBUG nova.compute.utils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 862.799988] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.961s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.803535] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Build of instance c5f2cc73-6bcd-4422-890b-3299d4cf4534 was re-scheduled: Binding failed for port 4791ae83-b652-4809-8446-60fd6b4220e0, please check neutron logs for more information. {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 862.803688] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 862.803960] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.804570] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.804570] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.877155] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.877383] 
env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.005553] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387790, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517248} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.006187] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 9ccdc727-536e-4db8-bad4-960858254758/9ccdc727-536e-4db8-bad4-960858254758.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.006399] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.006658] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aab136b-e4c6-481c-a1f9-ddf7dad8640b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.012850] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 863.012850] env[62503]: value = "task-1387791" [ 863.012850] env[62503]: _type = "Task" [ 863.012850] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.017929] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1d96ce89-6413-4ef6-9cd7-d46364d67260 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028 tempest-FloatingIPsAssociationNegativeTestJSON-1466583028-project-member] Lock "35ba5abb-ec2d-43ed-a4c4-7f6d18ccc1b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.667s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.022721] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387791, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.107109] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 863.131789] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.132062] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.132228] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.132512] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.132665] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.132811] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.133035] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.133206] env[62503]: DEBUG 
nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.133375] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.133537] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.133705] env[62503]: DEBUG nova.virt.hardware [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.134575] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356194b-c1fb-4428-9af1-7cabb1dc741a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.142942] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ca6a83-5921-4031-a19b-9aed8ddad8b6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.326302] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.415900] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.442011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c04d84e-a748-4f0d-a02d-24b25c22cff0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.451499] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d048ca-ac78-4766-8bfb-bfc99b5808bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.480974] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6624131-b5a5-4d88-9f12-da0ed219a70f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.488270] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef582a6e-3efc-4836-9f0b-437d096f9e60 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.501357] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.522373] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061601} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.522671] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.523357] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de6fb84-6f63-4c35-a85e-a9e081b3e407 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.525686] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 863.547268] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 9ccdc727-536e-4db8-bad4-960858254758/9ccdc727-536e-4db8-bad4-960858254758.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.549676] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ff6db8f-db74-4dff-a8a5-6d0196f60151 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.566916] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 863.566916] env[62503]: value = "task-1387792" [ 863.566916] env[62503]: _type = "Task" [ 863.566916] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.574708] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387792, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.771339] env[62503]: DEBUG nova.compute.manager [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-vif-plugged-dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 863.771339] env[62503]: DEBUG oslo_concurrency.lockutils [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] Acquiring lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.771339] env[62503]: DEBUG oslo_concurrency.lockutils [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.771339] env[62503]: DEBUG oslo_concurrency.lockutils [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.771339] env[62503]: DEBUG nova.compute.manager [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] No waiting events found dispatching network-vif-plugged-dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.771939] env[62503]: WARNING nova.compute.manager [req-dcb60e04-dfc5-4745-9f72-2913977b2d01 req-32cc2d99-7a2d-4920-9697-1cb6209c6648 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received unexpected event network-vif-plugged-dd4c60a2-b788-4d39-a0ca-b15676519c29 for instance with vm_state building and task_state spawning. [ 863.918063] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c5f2cc73-6bcd-4422-890b-3299d4cf4534" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.918323] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 863.918515] env[62503]: DEBUG nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 863.918686] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.938286] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.965656] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Successfully updated port: dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.023782] env[62503]: ERROR nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [req-daf92529-23d4-423a-80bf-198470aa99af] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-daf92529-23d4-423a-80bf-198470aa99af"}]}: nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. 
[ 864.039447] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 864.044698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.054389] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 864.054607] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.073670] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 864.079429] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387792, 'name': ReconfigVM_Task, 'duration_secs': 0.292344} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.079695] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 9ccdc727-536e-4db8-bad4-960858254758/9ccdc727-536e-4db8-bad4-960858254758.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.080383] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f326a8d5-e4d5-4d5d-a2c5-7d08ddec1494 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.086857] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 864.086857] env[62503]: value = "task-1387793" [ 864.086857] env[62503]: _type = "Task" [ 864.086857] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.103015] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387793, 'name': Rename_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.112417] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 864.245224] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e32c966-e4d9-4ad0-ac3c-c42ea9b61ee8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.252125] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3547556-bb28-4d1f-9106-c84960174570 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.282349] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263721db-1f7b-4c28-9dce-482a54519bfd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.289342] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f574d8-bd1c-4dc9-be7c-91b51da47e8c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.301888] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating inventory in ProviderTree for provider 
1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.440442] env[62503]: DEBUG nova.network.neutron [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.602339] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387793, 'name': Rename_Task, 'duration_secs': 0.140164} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.602690] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.602842] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39c3e8b1-c697-463a-a1db-820261de9b0c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.609618] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 864.609618] env[62503]: value = "task-1387794" [ 864.609618] env[62503]: _type = "Task" [ 864.609618] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.617027] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387794, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.839635] env[62503]: DEBUG nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 864.839902] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 85 to 86 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 864.840125] env[62503]: DEBUG nova.compute.provider_tree [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.943223] env[62503]: INFO nova.compute.manager [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c5f2cc73-6bcd-4422-890b-3299d4cf4534] Took 1.02 seconds to deallocate network for instance. [ 865.120198] env[62503]: DEBUG oslo_vmware.api [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387794, 'name': PowerOnVM_Task, 'duration_secs': 0.463454} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.120539] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.120862] env[62503]: INFO nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Took 8.96 seconds to spawn the instance on the hypervisor. 
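The Rename_Task / PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: a *_Task SOAP method returns a task managed object, and the session polls it (the "progress is N%" lines) until it reports success. A minimal sketch of the same pattern using oslo.vmware directly, with assumed vCenter credentials and an already-resolved vm_ref; Nova wraps this in nova.virt.vmwareapi.vm_util rather than calling it like this:

    # Sketch only: the invoke-then-poll task pattern.  Credentials and the
    # vm_ref lookup are assumptions; this is not Nova's vm_util code.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        "vc1.example.test", "svc-user", "secret",             # assumed vCenter
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # PowerOnVM_Task returns a Task managed object reference;
        # wait_for_task() polls it until success or raises on failure,
        # producing the "progress is N%" / "completed successfully" entries.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)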
[ 865.121167] env[62503]: DEBUG nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 865.121987] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2c7f70-3fac-45e0-8c50-4fb1cd674808 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.346421] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.546s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.347090] env[62503]: ERROR nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Traceback (most recent call last): [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.driver.spawn(context, instance, image_meta, [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] vm_ref = self.build_virtual_machine(instance, [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] vif_infos = vmwarevif.get_vif_info(self._session, [ 865.347090] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] for vif in network_info: [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return self._sync_wrapper(fn, 
*args, **kwargs) [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.wait() [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self[:] = self._gt.wait() [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return self._exit_event.wait() [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] current.throw(*self._exc) [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 865.347432] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] result = function(*args, **kwargs) [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] return func(*args, **kwargs) [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise e [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] nwinfo = self.network_api.allocate_for_instance( [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] created_port_ids = self._update_ports_for_instance( [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] with excutils.save_and_reraise_exception(): [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 865.347814] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] self.force_reraise() [ 865.347814] 
env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise self.value [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] updated_port = self._update_port( [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] _ensure_no_port_binding_failure(port) [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] raise exception.PortBindingFailed(port_id=port['id']) [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] nova.exception.PortBindingFailed: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. [ 865.348192] env[62503]: ERROR nova.compute.manager [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] [ 865.348192] env[62503]: DEBUG nova.compute.utils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 865.349037] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.808s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.350480] env[62503]: INFO nova.compute.claims [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.352994] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Build of instance 1e355e38-60c6-4e7f-beb4-160c4527ec51 was re-scheduled: Binding failed for port 630e253e-07e4-4fde-87cb-26c06ff7dd5e, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 865.353431] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 865.353654] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquiring lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.353801] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Acquired lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.353962] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.639090] env[62503]: INFO nova.compute.manager [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Took 31.89 seconds to build instance. [ 865.802097] env[62503]: DEBUG nova.compute.manager [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-changed-dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 865.802303] env[62503]: DEBUG nova.compute.manager [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Refreshing instance network info cache due to event network-changed-dd4c60a2-b788-4d39-a0ca-b15676519c29. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 865.802470] env[62503]: DEBUG oslo_concurrency.lockutils [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] Acquiring lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.802733] env[62503]: DEBUG oslo_concurrency.lockutils [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] Acquired lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.802798] env[62503]: DEBUG nova.network.neutron [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Refreshing network info cache for port dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.873789] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.971634] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.983178] env[62503]: INFO nova.scheduler.client.report [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance c5f2cc73-6bcd-4422-890b-3299d4cf4534 [ 866.140571] env[62503]: DEBUG oslo_concurrency.lockutils [None req-35e1767c-0254-4b7e-b241-cbaf9c357109 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.727s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.283136] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Successfully updated port: e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.352512] env[62503]: DEBUG nova.network.neutron [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.477569] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Releasing lock "refresh_cache-1e355e38-60c6-4e7f-beb4-160c4527ec51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.478787] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 866.478787] env[62503]: DEBUG nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 866.478787] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.493941] env[62503]: DEBUG nova.network.neutron [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.501235] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc729216-b075-4750-af45-9bf8378bdd0e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.504427] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.510230] env[62503]: DEBUG oslo_concurrency.lockutils [req-f99abc97-4084-4c14-9f09-40da8c324ab5 req-425a47d1-af80-4489-a11b-a138e0390559 service nova] Releasing lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.510401] env[62503]: DEBUG oslo_concurrency.lockutils [None req-578ece02-e979-44e0-9e11-eefd26357b90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c5f2cc73-6bcd-4422-890b-3299d4cf4534" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.229s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.516043] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad7f4d2-e5d3-45a2-9d16-db2c4e264fdf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.548902] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c05cc9-86cc-4e1f-b9e8-0aa7abf17808 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.556172] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d42945-a1fd-46ae-ae24-d85430eccbcd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.570785] env[62503]: DEBUG nova.compute.provider_tree [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.786947] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.786947] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.786947] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.011753] env[62503]: DEBUG nova.network.neutron [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 867.077081] env[62503]: DEBUG nova.scheduler.client.report [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 867.335255] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.516038] env[62503]: INFO nova.compute.manager [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] [instance: 1e355e38-60c6-4e7f-beb4-160c4527ec51] Took 1.04 seconds to deallocate network for instance. [ 867.582174] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.582732] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 867.586108] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.755s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.606903] env[62503]: DEBUG nova.compute.manager [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 867.607868] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c158d3c-857f-4a6e-975b-5214b8b5214a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.841940] env[62503]: DEBUG nova.compute.manager [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-vif-plugged-e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 867.845906] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Acquiring lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.846187] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.846367] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.846543] env[62503]: DEBUG nova.compute.manager [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] No waiting events found dispatching network-vif-plugged-e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.846707] env[62503]: WARNING nova.compute.manager [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received unexpected event network-vif-plugged-e4a6a500-8bcd-461e-8408-055dbdd07fe1 for instance with vm_state building and task_state spawning. 
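The network-vif-plugged / network-changed entries above are external instance events that Neutron posts to Nova's os-server-external-events API, tagged with the port UUID; nova-compute matches them against registered waiters (here there are none, hence the "Received unexpected event" warning while the instance is still building). A minimal sketch of the sending side of that call, with an assumed Nova endpoint and service token; Neutron actually goes through novaclient rather than raw requests:

    # Sketch only: how a network-vif-plugged event reaches nova-compute.
    # Endpoint and token are assumptions; Neutron uses novaclient for this.
    import requests

    NOVA = "http://nova-api.example.test/v2.1"                # assumed endpoint
    HEADERS = {"X-Auth-Token": "SERVICE_TOKEN"}                # assumed token

    def notify_vif_plugged(instance_uuid, port_id):
        event = {"name": "network-vif-plugged",
                 "server_uuid": instance_uuid,
                 "tag": port_id,            # the port UUID seen in the log entry
                 "status": "completed"}
        resp = requests.post(f"{NOVA}/os-server-external-events",
                             json={"events": [event]}, headers=HEADERS)
        resp.raise_for_status()
        return resp.json()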
[ 867.846867] env[62503]: DEBUG nova.compute.manager [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-changed-e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 867.847019] env[62503]: DEBUG nova.compute.manager [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Refreshing instance network info cache due to event network-changed-e4a6a500-8bcd-461e-8408-055dbdd07fe1. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 867.847189] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Acquiring lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.893133] env[62503]: DEBUG nova.network.neutron [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updating instance_info_cache with network_info: [{"id": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "address": "fa:16:3e:b8:45:04", "network": {"id": "c367d900-775e-4eae-9597-cdf2a4f28d2d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915575104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd4c60a2-b7", "ovs_interfaceid": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", "address": "fa:16:3e:f0:ef:5a", "network": {"id": "21a0dca4-d82a-47f0-82ad-173cdb98c822", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928622145", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a6a500-8b", "ovs_interfaceid": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.092119] env[62503]: DEBUG nova.compute.utils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 868.096630] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 868.096800] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 868.122429] env[62503]: INFO nova.compute.manager [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] instance snapshotting [ 868.125472] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e50f98f-a68b-467f-8ae3-00ae380a46e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.161509] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece4fa91-7814-47cd-9962-9ed4561b5866 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.181963] env[62503]: DEBUG nova.policy [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d22ef1366854b6cad3923e38ca93241', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd404225d6f9c46148e0b7080ec1eee99', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 868.278634] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.278745] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.316314] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220b34c6-6237-49ab-9ebd-dceee1c0fe85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.324198] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba0b224-f1a4-4491-86ec-df1dfae248f3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.362622] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e9e79e-064e-4ceb-92fa-48bf1a148800 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.370125] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b085aa-da36-4b66-a96e-9bad42b45b8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.385136] env[62503]: DEBUG nova.compute.provider_tree [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.396239] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Releasing lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.396675] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance network_info: |[{"id": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "address": "fa:16:3e:b8:45:04", "network": {"id": "c367d900-775e-4eae-9597-cdf2a4f28d2d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915575104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd4c60a2-b7", "ovs_interfaceid": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", 
"address": "fa:16:3e:f0:ef:5a", "network": {"id": "21a0dca4-d82a-47f0-82ad-173cdb98c822", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928622145", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a6a500-8b", "ovs_interfaceid": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 868.396935] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Acquired lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.397113] env[62503]: DEBUG nova.network.neutron [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Refreshing network info cache for port e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.398581] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:45:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0721b358-3768-472d-95f8-6d6755ab1635', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd4c60a2-b788-4d39-a0ca-b15676519c29', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:ef:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4a6a500-8bcd-461e-8408-055dbdd07fe1', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.407414] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Creating folder: Project (a5f8c12d03a0446988d5335c00cee0ff). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.408790] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6c42915-a4aa-4ef6-89ac-5882c258e1f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.419068] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Created folder: Project (a5f8c12d03a0446988d5335c00cee0ff) in parent group-v294540. [ 868.419068] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Creating folder: Instances. Parent ref: group-v294572. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.419289] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a94c9e8-b9fb-4016-b33c-7f12eb77fd82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.427754] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Created folder: Instances in parent group-v294572. [ 868.428231] env[62503]: DEBUG oslo.service.loopingcall [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.428231] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.428402] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ca5a806-e08a-49e5-b5cc-664c98c28f3a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.448652] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.448652] env[62503]: value = "task-1387797" [ 868.448652] env[62503]: _type = "Task" [ 868.448652] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.456086] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387797, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.549075] env[62503]: INFO nova.scheduler.client.report [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Deleted allocations for instance 1e355e38-60c6-4e7f-beb4-160c4527ec51 [ 868.555024] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Successfully created port: 6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.600635] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 868.674945] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Creating Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 868.675323] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-500ad762-0091-4f9a-8975-1a24207e115f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.682979] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 868.682979] env[62503]: value = "task-1387798" [ 868.682979] env[62503]: _type = "Task" [ 868.682979] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.690797] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387798, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.781677] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 868.890474] env[62503]: DEBUG nova.scheduler.client.report [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 868.964404] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387797, 'name': CreateVM_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.059943] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66c68936-6590-4907-b875-c3ea4a73524c tempest-ServersNegativeTestJSON-1133423638 tempest-ServersNegativeTestJSON-1133423638-project-member] Lock "1e355e38-60c6-4e7f-beb4-160c4527ec51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.620s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.154742] env[62503]: DEBUG nova.network.neutron [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updated VIF entry in instance network info cache for port e4a6a500-8bcd-461e-8408-055dbdd07fe1. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.155266] env[62503]: DEBUG nova.network.neutron [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updating instance_info_cache with network_info: [{"id": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "address": "fa:16:3e:b8:45:04", "network": {"id": "c367d900-775e-4eae-9597-cdf2a4f28d2d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1915575104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd4c60a2-b7", "ovs_interfaceid": "dd4c60a2-b788-4d39-a0ca-b15676519c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", "address": "fa:16:3e:f0:ef:5a", "network": {"id": "21a0dca4-d82a-47f0-82ad-173cdb98c822", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928622145", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a6a500-8b", "ovs_interfaceid": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.194114] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387798, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.317891] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.397104] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.811s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.397489] env[62503]: ERROR nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Traceback (most recent call last): [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.driver.spawn(context, instance, image_meta, [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] vm_ref = self.build_virtual_machine(instance, [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] for vif in network_info: [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return self._sync_wrapper(fn, *args, **kwargs) [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 
04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.wait() [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self[:] = self._gt.wait() [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return self._exit_event.wait() [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] current.throw(*self._exc) [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] result = function(*args, **kwargs) [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] return func(*args, **kwargs) [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise e [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] nwinfo = self.network_api.allocate_for_instance( [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] created_port_ids = self._update_ports_for_instance( [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] with excutils.save_and_reraise_exception(): [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] self.force_reraise() [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 
04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 869.397489] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise self.value [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] updated_port = self._update_port( [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] _ensure_no_port_binding_failure(port) [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] raise exception.PortBindingFailed(port_id=port['id']) [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] nova.exception.PortBindingFailed: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. [ 869.398613] env[62503]: ERROR nova.compute.manager [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] [ 869.398613] env[62503]: DEBUG nova.compute.utils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 869.400148] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.919s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.401587] env[62503]: INFO nova.compute.claims [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.404367] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Build of instance 04b9ed30-2cd0-4c07-9141-76f0f53fefb4 was re-scheduled: Binding failed for port 595f989b-b0f0-4115-a28f-c984f19ae80d, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 869.404815] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 869.406239] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquiring lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.406397] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Acquired lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.406695] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.459911] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387797, 'name': CreateVM_Task, 'duration_secs': 0.78342} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.460138] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.460961] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.461232] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.461495] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 869.461744] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bae963e-a680-4242-8b54-99f8a6efc3b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
869.466502] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 869.466502] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525894e3-6aa8-5ad6-135d-0db6555a4787" [ 869.466502] env[62503]: _type = "Task" [ 869.466502] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.476797] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525894e3-6aa8-5ad6-135d-0db6555a4787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.612704] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 869.633946] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.634259] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.634369] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.634560] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.635104] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Image pref 0:0:0 
{{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.635104] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.635698] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.635936] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.636148] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.636330] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.636515] env[62503]: DEBUG nova.virt.hardware [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.637411] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d1fca6-8880-4831-b2c7-524077829358 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.646092] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2cd2fe-2109-4797-a990-b1cd8787d7e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.660403] env[62503]: DEBUG oslo_concurrency.lockutils [req-6fe1272b-f05f-49e3-9f39-a5be11995a25 req-863ce484-8b96-422b-a912-07af95473607 service nova] Releasing lock "refresh_cache-6229dda6-90e8-457b-beb3-2107e3700b29" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.692565] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387798, 'name': CreateSnapshot_Task, 'duration_secs': 0.666175} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.692818] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Created Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 869.693638] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3abd16-56b6-4775-ab30-cd7834b5d798 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.939368] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.983167] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525894e3-6aa8-5ad6-135d-0db6555a4787, 'name': SearchDatastore_Task, 'duration_secs': 0.012938} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.983494] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.983727] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.984404] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.990017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.990017] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.990017] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cbb81ef-4cba-4cbc-8519-d00ff9b02e90 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.999206] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.999206] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.999928] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d0a0e00-021d-4b84-8855-2bfff430e2dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.005534] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 870.005534] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528be0d6-0bb3-9b91-ea0c-3724de00b86a" [ 870.005534] env[62503]: _type = "Task" [ 870.005534] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.016328] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528be0d6-0bb3-9b91-ea0c-3724de00b86a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.216849] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Creating linked-clone VM from snapshot {{(pid=62503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 870.217198] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-24e1971f-af43-4041-b9fb-470173f581d7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.227082] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 870.227082] env[62503]: value = "task-1387799" [ 870.227082] env[62503]: _type = "Task" [ 870.227082] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.239275] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387799, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.289822] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.331844] env[62503]: DEBUG nova.compute.manager [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received event network-vif-plugged-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 870.332074] env[62503]: DEBUG oslo_concurrency.lockutils [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] Acquiring lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.332289] env[62503]: DEBUG oslo_concurrency.lockutils [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.332449] env[62503]: DEBUG oslo_concurrency.lockutils [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.332701] env[62503]: DEBUG nova.compute.manager [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] No waiting events found dispatching network-vif-plugged-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 870.332770] env[62503]: WARNING nova.compute.manager [req-c2894969-029a-4504-b930-be155395c8f8 req-badfd621-1d4e-47e4-9c27-f9ecf97254ce service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received unexpected event network-vif-plugged-6a859ef4-58a2-46ba-9e2e-b0857df9cfea for instance with vm_state building and task_state spawning. [ 870.412894] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Successfully updated port: 6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 870.516829] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528be0d6-0bb3-9b91-ea0c-3724de00b86a, 'name': SearchDatastore_Task, 'duration_secs': 0.008986} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.517909] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-903b3a9a-d13b-4f9a-a068-fc6ba4793b04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.523259] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 870.523259] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522efee6-5b77-4d0b-0430-ffa749ace987" [ 870.523259] env[62503]: _type = "Task" [ 870.523259] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.535952] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522efee6-5b77-4d0b-0430-ffa749ace987, 'name': SearchDatastore_Task, 'duration_secs': 0.008361} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.539045] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.539964] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 6229dda6-90e8-457b-beb3-2107e3700b29/6229dda6-90e8-457b-beb3-2107e3700b29.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.543478] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1befa248-d279-4746-858b-535d8d27fca4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.549979] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 870.549979] env[62503]: value = "task-1387800" [ 870.549979] env[62503]: _type = "Task" [ 870.549979] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.557964] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387800, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.589189] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9c5a51-4cc0-408c-9cd4-2d3b0d88ca4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.597340] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaa1127-cfb6-489c-96ba-30209f614c11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.632593] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858e3d57-70c5-463c-83ec-1b0ce3d9d3fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.641222] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2153b49b-bea0-40f6-babc-4d89936f6d7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.654130] env[62503]: DEBUG nova.compute.provider_tree [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.738186] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387799, 'name': CloneVM_Task} progress is 94%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.793176] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Releasing lock "refresh_cache-04b9ed30-2cd0-4c07-9141-76f0f53fefb4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.793519] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 870.793711] env[62503]: DEBUG nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 870.793881] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.812019] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.921376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.921376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.921376] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 871.074052] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387800, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496168} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.074052] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 6229dda6-90e8-457b-beb3-2107e3700b29/6229dda6-90e8-457b-beb3-2107e3700b29.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.074052] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.074052] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acde4d1d-9c54-4bc4-882e-1643713fd919 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.081419] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 871.081419] env[62503]: value = "task-1387801" [ 871.081419] env[62503]: _type = "Task" [ 871.081419] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.090942] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387801, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.161061] env[62503]: DEBUG nova.scheduler.client.report [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 871.237252] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387799, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.315304] env[62503]: DEBUG nova.network.neutron [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.482535] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 871.591175] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083077} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.591351] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.592206] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56ada8c-bd4f-438a-bf13-2f59e8a98a32 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.630854] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 6229dda6-90e8-457b-beb3-2107e3700b29/6229dda6-90e8-457b-beb3-2107e3700b29.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.631291] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-789cce49-3cad-4b29-9dce-dab9ea46e6ac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.653532] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 871.653532] env[62503]: value = "task-1387802" [ 871.653532] env[62503]: _type = "Task" [ 871.653532] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.662910] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387802, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.664716] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.665280] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 871.668235] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.748s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.668617] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.668687] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 871.669017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.890s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.672920] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b77921c-6122-4538-b62f-410584ddecc2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.681860] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf67f14-d0b4-4038-b20e-eafea56b0b3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.703129] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8eba05-9e59-42e7-94f8-82da59032230 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.709186] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a188c931-0173-4d11-9ed0-e74fc27ef92e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.745876] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181353MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 871.746058] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.755956] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387799, 'name': CloneVM_Task} progress is 95%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.798082] env[62503]: DEBUG nova.network.neutron [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.816854] env[62503]: INFO nova.compute.manager [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] [instance: 04b9ed30-2cd0-4c07-9141-76f0f53fefb4] Took 1.02 seconds to deallocate network for instance. [ 872.164537] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387802, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.173994] env[62503]: DEBUG nova.compute.utils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.175349] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 872.175545] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.260410] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387799, 'name': CloneVM_Task, 'duration_secs': 1.723475} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.260410] env[62503]: DEBUG nova.policy [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b076e358f78e4874876f90d96fd612e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e818e5ee9dc24efa96747c9558514a15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 872.262468] env[62503]: INFO nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Created linked-clone VM from snapshot [ 872.263124] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bc2024-c944-476e-b188-31c037c505d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.273180] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Uploading image 3306da0e-1d43-4f11-be11-5fe7cf1194eb {{(pid=62503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 872.300780] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.301203] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Instance network_info: |[{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 872.303873] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:7b:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a859ef4-58a2-46ba-9e2e-b0857df9cfea', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.312398] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Creating folder: Project (d404225d6f9c46148e0b7080ec1eee99). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.314982] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 872.314982] env[62503]: value = "vm-294576" [ 872.314982] env[62503]: _type = "VirtualMachine" [ 872.314982] env[62503]: }. 
{{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 872.315458] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6dd4c288-25a3-4f26-8291-7af5b388baec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.317157] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50dcb086-0df9-49aa-8a62-df8c47d88a27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.327161] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9905d7d-94a6-4add-bd9c-18038d74f23e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.330685] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lease: (returnval){ [ 872.330685] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e45bb6-2369-8baa-74d1-d25dab5ced40" [ 872.330685] env[62503]: _type = "HttpNfcLease" [ 872.330685] env[62503]: } obtained for exporting VM: (result){ [ 872.330685] env[62503]: value = "vm-294576" [ 872.330685] env[62503]: _type = "VirtualMachine" [ 872.330685] env[62503]: }. {{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 872.330985] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the lease: (returnval){ [ 872.330985] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e45bb6-2369-8baa-74d1-d25dab5ced40" [ 872.330985] env[62503]: _type = "HttpNfcLease" [ 872.330985] env[62503]: } to be ready. {{(pid=62503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 872.335782] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Created folder: Project (d404225d6f9c46148e0b7080ec1eee99) in parent group-v294540. [ 872.335956] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Creating folder: Instances. Parent ref: group-v294577. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.337803] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b39c522e-f00f-4f9e-81fa-8b3a2c208a00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.342667] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 872.342667] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e45bb6-2369-8baa-74d1-d25dab5ced40" [ 872.342667] env[62503]: _type = "HttpNfcLease" [ 872.342667] env[62503]: } is initializing. 
{{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 872.346109] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32851a08-6d4d-46c1-8930-84d2f13b3e12 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.349583] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Created folder: Instances in parent group-v294577. [ 872.349583] env[62503]: DEBUG oslo.service.loopingcall [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.349856] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.350069] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a489f9bb-0a72-465a-b4e6-9e4a44bc342d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.392284] env[62503]: DEBUG nova.compute.manager [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 872.392421] env[62503]: DEBUG nova.compute.manager [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing instance network info cache due to event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 872.392607] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] Acquiring lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.392799] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] Acquired lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.392972] env[62503]: DEBUG nova.network.neutron [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing network info cache for port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.396752] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b37ab78-3730-4d65-b0ad-43aaca0e457c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.401856] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.401856] env[62503]: value = "task-1387806" [ 872.401856] env[62503]: _type = "Task" [ 872.401856] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.409747] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768a9495-6910-4228-95e0-c41b13e8aa81 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.416995] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387806, 'name': CreateVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.427789] env[62503]: DEBUG nova.compute.provider_tree [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.666685] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387802, 'name': ReconfigVM_Task, 'duration_secs': 0.861865} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.666973] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 6229dda6-90e8-457b-beb3-2107e3700b29/6229dda6-90e8-457b-beb3-2107e3700b29.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.668944] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df45745c-48c4-4c10-ab86-b41b58613c9b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.676966] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 872.676966] env[62503]: value = "task-1387807" [ 872.676966] env[62503]: _type = "Task" [ 872.676966] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.683210] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 872.695392] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387807, 'name': Rename_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.787756] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Successfully created port: 24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.840478] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 872.840478] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e45bb6-2369-8baa-74d1-d25dab5ced40" [ 872.840478] env[62503]: _type = "HttpNfcLease" [ 872.840478] env[62503]: } is ready. {{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 872.840823] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 872.840823] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e45bb6-2369-8baa-74d1-d25dab5ced40" [ 872.840823] env[62503]: _type = "HttpNfcLease" [ 872.840823] env[62503]: }. 
{{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 872.841988] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58b076b-d0b8-4c69-b967-f0d2ecf94b94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.849270] env[62503]: INFO nova.scheduler.client.report [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Deleted allocations for instance 04b9ed30-2cd0-4c07-9141-76f0f53fefb4 [ 872.862444] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk from lease info. {{(pid=62503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 872.862655] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk for reading. {{(pid=62503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 872.933837] env[62503]: DEBUG nova.scheduler.client.report [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 872.942781] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387806, 'name': CreateVM_Task, 'duration_secs': 0.355925} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.942987] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.943757] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.946645] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.946645] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.946645] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a5206f-5ec8-4395-bc4e-ca8db118ab58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.949482] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 872.949482] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee7a90-32d4-8289-cee6-1333b3fc2321" [ 872.949482] env[62503]: _type = "Task" [ 872.949482] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.959707] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee7a90-32d4-8289-cee6-1333b3fc2321, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.984312] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4756d95c-78eb-4c15-bbbf-d6a4c5936632 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.183794] env[62503]: DEBUG nova.network.neutron [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updated VIF entry in instance network info cache for port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.183794] env[62503]: DEBUG nova.network.neutron [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.190999] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387807, 'name': Rename_Task, 'duration_secs': 0.189323} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.194479] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.196982] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8c2fa4c-5eea-43e4-b2f3-1101b08522f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.204960] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 873.204960] env[62503]: value = "task-1387808" [ 873.204960] env[62503]: _type = "Task" [ 873.204960] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.220959] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.367926] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d5cf39b4-4dfb-4634-a37c-ca8390f95f74 tempest-AttachInterfacesTestJSON-1175081483 tempest-AttachInterfacesTestJSON-1175081483-project-member] Lock "04b9ed30-2cd0-4c07-9141-76f0f53fefb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.227s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.438939] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.770s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.439744] env[62503]: ERROR nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Traceback (most recent call last): [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 2644, in _build_and_run_instance [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.driver.spawn(context, instance, image_meta, [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] vm_ref = self.build_virtual_machine(instance, [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] for vif in network_info: [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return self._sync_wrapper(fn, *args, **kwargs) [ 873.439744] env[62503]: 
ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.wait() [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self[:] = self._gt.wait() [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return self._exit_event.wait() [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] current.throw(*self._exc) [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] result = function(*args, **kwargs) [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] return func(*args, **kwargs) [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 2014, in _allocate_network_async [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise e [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/compute/manager.py", line 1992, in _allocate_network_async [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] nwinfo = self.network_api.allocate_for_instance( [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] created_port_ids = self._update_ports_for_instance( [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] with excutils.save_and_reraise_exception(): [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] self.force_reraise() [ 873.439744] env[62503]: ERROR nova.compute.manager 
[instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 873.439744] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise self.value [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] updated_port = self._update_port( [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] _ensure_no_port_binding_failure(port) [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] raise exception.PortBindingFailed(port_id=port['id']) [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] nova.exception.PortBindingFailed: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. [ 873.440794] env[62503]: ERROR nova.compute.manager [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] [ 873.441090] env[62503]: DEBUG nova.compute.utils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. {{(pid=62503) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 873.442923] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.088s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.444832] env[62503]: INFO nova.compute.claims [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.448353] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Build of instance c990f365-97df-4203-bd8c-dab822b2d8c3 was re-scheduled: Binding failed for port cfe47a55-23d4-4918-ad96-c1a32a7fc784, please check neutron logs for more information. 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2483}} [ 873.449633] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Unplugging VIFs for instance {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3009}} [ 873.449633] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.449633] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.449633] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.461450] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee7a90-32d4-8289-cee6-1333b3fc2321, 'name': SearchDatastore_Task, 'duration_secs': 0.009976} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.464436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.464436] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.464436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.464436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.464436] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.465186] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca3e66de-67cf-48fd-ba26-a3f842ac1e44 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.474475] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.474776] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.475599] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0f897db-9ce2-414d-b23e-a0868abd1b85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.481579] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 873.481579] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528054a7-dc17-e519-4efb-e96d8dc87919" [ 873.481579] env[62503]: _type = "Task" [ 873.481579] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.492334] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528054a7-dc17-e519-4efb-e96d8dc87919, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.690699] env[62503]: DEBUG oslo_concurrency.lockutils [req-2f30e812-8da3-473d-8443-ea0a143a4396 req-bd157cc1-2c57-463e-9963-89cda051dd72 service nova] Releasing lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.698430] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 873.715673] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387808, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.724886] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.725367] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.725541] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.726191] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.726415] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.726752] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.727909] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.730518] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 873.730958] env[62503]: DEBUG nova.virt.hardware [None 
req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.731240] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.731446] env[62503]: DEBUG nova.virt.hardware [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.732370] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d85274-b1d9-4ad2-ba96-6510049aaebc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.747082] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e948915-44d0-48ec-84a9-1a475a4bbea4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.976881] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.994163] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528054a7-dc17-e519-4efb-e96d8dc87919, 'name': SearchDatastore_Task, 'duration_secs': 0.008155} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.995179] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de6ef22-18c5-4cbe-ac8c-9a7eb5640ef9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.002219] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 874.002219] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fd967b-aa09-c279-31f7-0e809b2b48dc" [ 874.002219] env[62503]: _type = "Task" [ 874.002219] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.010964] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fd967b-aa09-c279-31f7-0e809b2b48dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.081909] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.216738] env[62503]: DEBUG oslo_vmware.api [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387808, 'name': PowerOnVM_Task, 'duration_secs': 0.645159} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.217019] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.217269] env[62503]: INFO nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Took 11.11 seconds to spawn the instance on the hypervisor. [ 874.217464] env[62503]: DEBUG nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 874.218320] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b98068b-2413-414f-82ba-3a7b422fd0de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.448795] env[62503]: DEBUG nova.compute.manager [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received event network-vif-plugged-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 874.449052] env[62503]: DEBUG oslo_concurrency.lockutils [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] Acquiring lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.449314] env[62503]: DEBUG oslo_concurrency.lockutils [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.449438] env[62503]: DEBUG oslo_concurrency.lockutils [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.449613] env[62503]: DEBUG nova.compute.manager [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] No waiting events found dispatching network-vif-plugged-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.449779] env[62503]: WARNING nova.compute.manager [req-f3239ee3-6cb1-4598-ae52-3fc92680af23 req-920cb091-708a-4ab8-9842-fcead23a75bf service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received unexpected event network-vif-plugged-24ac3187-6729-47ea-beb6-4c96018b8a05 for instance with vm_state building and task_state spawning. [ 874.516318] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fd967b-aa09-c279-31f7-0e809b2b48dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.521377] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.521941] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] cf611345-d276-4745-a2f8-0551c9dca2c2/cf611345-d276-4745-a2f8-0551c9dca2c2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 874.527763] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43a903eb-4ed3-42d4-b742-7e99ebc25b24 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.535188] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 874.535188] env[62503]: value = "task-1387809" [ 874.535188] env[62503]: _type = "Task" [ 874.535188] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.546631] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387809, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.585140] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-c990f365-97df-4203-bd8c-dab822b2d8c3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.585412] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62503) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3032}} [ 874.585628] env[62503]: DEBUG nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 874.585928] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.612635] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571dfd2c-ecec-4854-96e4-13f8b2fa1e9c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.616016] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.622175] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5464ea-25fa-4720-a24b-5574a50916f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.655167] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Successfully updated port: 24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.657161] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7886cc-ba08-40a6-bd10-10ac8330848d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.665446] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8e57d2-57d6-4aca-a47b-ed57e5970252 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.681391] env[62503]: DEBUG nova.compute.provider_tree [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.742923] env[62503]: INFO nova.compute.manager [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Took 33.55 seconds to build instance. [ 875.048452] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387809, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486631} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.049290] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] cf611345-d276-4745-a2f8-0551c9dca2c2/cf611345-d276-4745-a2f8-0551c9dca2c2.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.049546] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.049816] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-868b3c4c-eb3a-4fb9-b15d-4f13afff03eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.058702] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 875.058702] env[62503]: value = "task-1387810" [ 875.058702] env[62503]: _type = "Task" [ 875.058702] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.065748] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387810, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.123567] env[62503]: DEBUG nova.network.neutron [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.161874] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.161874] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.161973] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.185462] env[62503]: DEBUG nova.scheduler.client.report [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 875.246358] env[62503]: DEBUG oslo_concurrency.lockutils [None req-242ac6f9-00b2-4413-9a56-88c712c0138b tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.535394] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "6229dda6-90e8-457b-beb3-2107e3700b29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.535682] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.535908] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.536309] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.536538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.538903] env[62503]: INFO nova.compute.manager [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Terminating instance [ 875.540910] env[62503]: DEBUG nova.compute.manager [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 875.541135] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.541994] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25ab22-5deb-4b95-a901-a1b76e90e49f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.550658] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.550658] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2681bb1d-c410-432d-830b-edd5158cede4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.558178] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 875.558178] env[62503]: value = "task-1387811" [ 875.558178] env[62503]: _type = "Task" [ 875.558178] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.571489] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387811, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.574876] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076811} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.575639] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.576492] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0984ec45-be3c-4c7b-a8d2-f2add79b1d7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.602294] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] cf611345-d276-4745-a2f8-0551c9dca2c2/cf611345-d276-4745-a2f8-0551c9dca2c2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.603371] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-452a6f2d-bbad-4f72-98b2-74a8dc4db6ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.626301] env[62503]: INFO nova.compute.manager [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: c990f365-97df-4203-bd8c-dab822b2d8c3] Took 1.04 seconds to deallocate network for instance. [ 875.630934] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 875.630934] env[62503]: value = "task-1387812" [ 875.630934] env[62503]: _type = "Task" [ 875.630934] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.639604] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387812, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.691791] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.692339] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 875.695062] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.650s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.696486] env[62503]: INFO nova.compute.claims [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.712579] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.072816] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387811, 'name': PowerOffVM_Task, 'duration_secs': 0.241295} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.073180] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.073393] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.073677] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d658cdec-cf8e-42d9-9f5b-f82f64cb3f48 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.146590] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387812, 'name': ReconfigVM_Task, 'duration_secs': 0.276207} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.146773] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Reconfigured VM instance instance-00000046 to attach disk [datastore1] cf611345-d276-4745-a2f8-0551c9dca2c2/cf611345-d276-4745-a2f8-0551c9dca2c2.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.147484] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d049944e-cb25-404c-947f-5ba5b9065460 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.155387] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 876.155387] env[62503]: value = "task-1387814" [ 876.155387] env[62503]: _type = "Task" [ 876.155387] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.164766] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387814, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.177123] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.177357] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.179729] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Deleting the datastore file [datastore1] 6229dda6-90e8-457b-beb3-2107e3700b29 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.179729] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9398188-41d1-46ec-bc57-98818fc8d75e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.185079] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for the task: (returnval){ [ 876.185079] env[62503]: value = "task-1387815" [ 876.185079] env[62503]: _type = "Task" [ 876.185079] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.197482] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.197482] env[62503]: DEBUG nova.network.neutron [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.201785] env[62503]: DEBUG nova.compute.utils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.204545] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 876.204714] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.299040] env[62503]: DEBUG nova.policy [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d1fa794892747598a9c0b50bfd82581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12a42517cf8f4ad3836f2f95e8833dd4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 876.489930] env[62503]: DEBUG nova.compute.manager [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 876.490190] env[62503]: DEBUG nova.compute.manager [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing instance network info cache due to event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 876.490859] env[62503]: DEBUG oslo_concurrency.lockutils [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.665675] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387814, 'name': Rename_Task, 'duration_secs': 0.159666} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.665949] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.666233] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fe8c305-5a3b-471e-8739-1c8f9be50c39 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.673359] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 876.673359] env[62503]: value = "task-1387816" [ 876.673359] env[62503]: _type = "Task" [ 876.673359] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.677493] env[62503]: INFO nova.scheduler.client.report [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance c990f365-97df-4203-bd8c-dab822b2d8c3 [ 876.687630] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.697140] env[62503]: DEBUG oslo_vmware.api [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Task: {'id': task-1387815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192382} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.699820] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.699820] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.699820] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.699820] env[62503]: INFO nova.compute.manager [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Took 1.16 seconds to destroy the instance on the hypervisor. 
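Editor's note: the CopyVirtualDisk / ExtendVirtualDisk / PowerOffVM sequences above all follow the same oslo.vmware pattern: a *_Task method is invoked through the API session, and the paired "Waiting for the task ... to complete" / "_poll_task ... progress is N%" lines are wait_for_task() polling the vSphere TaskInfo object until it reaches a terminal state. A minimal, hedged sketch of that pattern follows (illustration only, not the exact nova code; vm_ref is assumed to be a previously looked-up managed object reference, and session construction details vary by oslo.vmware release):

from oslo_vmware import api as vmware_api


def power_off_and_wait(session: vmware_api.VMwareAPISession, vm_ref):
    """Start PowerOffVM_Task for vm_ref and block until vCenter finishes it.

    Mirrors the 'Powering off the VM' -> 'PowerOffVM_Task progress is 0%'
    -> 'completed successfully' sequence recorded in the log above.
    """
    # invoke_api() issues the SOAP call (the 'Invoking VirtualMachine.PowerOffVM_Task'
    # DEBUG lines) and returns a reference to the asynchronous vSphere task.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() repeatedly reads the task's TaskInfo (the '_poll_task'
    # DEBUG lines), raises if the task ends in an error state, and returns
    # the final task info on success.
    return session.wait_for_task(task)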
[ 876.699820] env[62503]: DEBUG oslo.service.loopingcall [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.699820] env[62503]: DEBUG nova.compute.manager [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 876.699820] env[62503]: DEBUG nova.network.neutron [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.701247] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.701591] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Instance network_info: |[{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 876.701889] env[62503]: DEBUG oslo_concurrency.lockutils [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.702121] env[62503]: DEBUG nova.network.neutron [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.703539] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:f1:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24ac3187-6729-47ea-beb6-4c96018b8a05', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.712092] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating folder: Project (e818e5ee9dc24efa96747c9558514a15). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.713542] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 876.719370] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e8eb44a-ea32-4161-9df6-450492e0f8d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.731409] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Successfully created port: 71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.736082] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created folder: Project (e818e5ee9dc24efa96747c9558514a15) in parent group-v294540. [ 876.736082] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating folder: Instances. Parent ref: group-v294580. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.736082] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb355184-c6fb-44a6-9982-c7491b3bb454 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.756044] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created folder: Instances in parent group-v294580. [ 876.756044] env[62503]: DEBUG oslo.service.loopingcall [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.756044] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.756266] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61900352-da48-48f6-ac83-8c63fc3723ab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.782781] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.782781] env[62503]: value = "task-1387819" [ 876.782781] env[62503]: _type = "Task" [ 876.782781] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.796906] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387819, 'name': CreateVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.861416] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3319e2ea-3fa0-47ba-9e16-1840c4de600e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.869430] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2009129b-057c-415c-ab98-8b7280a653b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.901594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e68ce2-941a-43f3-83cb-c467391c08d8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.906356] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3231a0-df6e-481f-b8a4-5dc00986a851 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.922098] env[62503]: DEBUG nova.compute.provider_tree [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.145270] env[62503]: DEBUG nova.compute.manager [req-a7d657b9-26b2-4163-95e3-80570aa183c7 req-c3695a92-8285-4f0f-96c2-82ead10371d8 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-vif-deleted-dd4c60a2-b788-4d39-a0ca-b15676519c29 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 877.145686] env[62503]: INFO nova.compute.manager [req-a7d657b9-26b2-4163-95e3-80570aa183c7 req-c3695a92-8285-4f0f-96c2-82ead10371d8 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Neutron deleted interface dd4c60a2-b788-4d39-a0ca-b15676519c29; detaching it from the instance and deleting it from the info cache [ 877.146128] env[62503]: DEBUG nova.network.neutron [req-a7d657b9-26b2-4163-95e3-80570aa183c7 req-c3695a92-8285-4f0f-96c2-82ead10371d8 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updating instance_info_cache with network_info: [{"id": 
"e4a6a500-8bcd-461e-8408-055dbdd07fe1", "address": "fa:16:3e:f0:ef:5a", "network": {"id": "21a0dca4-d82a-47f0-82ad-173cdb98c822", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928622145", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5f8c12d03a0446988d5335c00cee0ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a6a500-8b", "ovs_interfaceid": "e4a6a500-8bcd-461e-8408-055dbdd07fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.191171] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387816, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.194142] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b540304e-d51f-4246-b92a-23f24c2fb495 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "c990f365-97df-4203-bd8c-dab822b2d8c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.158s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.294422] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387819, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.425580] env[62503]: DEBUG nova.scheduler.client.report [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 877.650370] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d67c865-0a53-4530-a1b9-e73707be3011 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.660959] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682af2d7-312c-4a03-a3a2-13f9f079860b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.683258] env[62503]: DEBUG oslo_vmware.api [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387816, 'name': PowerOnVM_Task, 'duration_secs': 0.949902} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.689777] env[62503]: DEBUG nova.network.neutron [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updated VIF entry in instance network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.690183] env[62503]: DEBUG nova.network.neutron [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.691451] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.691664] env[62503]: INFO nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Took 8.08 seconds to spawn the instance on the hypervisor. [ 877.691841] env[62503]: DEBUG nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 877.692195] env[62503]: DEBUG nova.compute.manager [req-a7d657b9-26b2-4163-95e3-80570aa183c7 req-c3695a92-8285-4f0f-96c2-82ead10371d8 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Detach interface failed, port_id=dd4c60a2-b788-4d39-a0ca-b15676519c29, reason: Instance 6229dda6-90e8-457b-beb3-2107e3700b29 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 877.693583] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4142013-fca2-4f73-a2af-5d7da8cbeba6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.730281] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 877.761173] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.761389] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.761526] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.761934] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.761934] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.762128] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.762239] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 
tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.765074] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.765074] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.765074] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.765074] env[62503]: DEBUG nova.virt.hardware [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.765074] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84c70ef-bb4e-49bc-ab2b-4ee0437d38f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.779725] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea968487-d541-40c6-a9f7-0ce7b1c0a7d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.809771] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387819, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.931054] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.931669] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 877.938500] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.617s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.938500] env[62503]: INFO nova.compute.claims [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.130667] env[62503]: DEBUG nova.network.neutron [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.194211] env[62503]: DEBUG oslo_concurrency.lockutils [req-933df28b-6c8c-4ead-bdf9-e5b492dd5cb8 req-796aeb3a-57fd-429d-94c2-93133589b718 service nova] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.212158] env[62503]: INFO nova.compute.manager [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Took 30.69 seconds to build instance. [ 878.294676] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387819, 'name': CreateVM_Task, 'duration_secs': 1.404935} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.299571] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.299571] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.299571] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.299571] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.299571] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75d155a8-b071-4799-be98-6e811e7ac69e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.306024] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 878.306024] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5276ac6e-7acd-cb74-17fe-3cf60b3c986b" [ 878.306024] env[62503]: _type = "Task" [ 878.306024] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.313407] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5276ac6e-7acd-cb74-17fe-3cf60b3c986b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.441055] env[62503]: DEBUG nova.compute.utils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.447028] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 878.447028] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.547474] env[62503]: DEBUG nova.policy [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b072e4c8ef94b26895d59ede518aaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0849093c8b48400a8e9d56171ea99e8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 878.634047] env[62503]: INFO nova.compute.manager [-] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Took 1.93 seconds to deallocate network for instance. [ 878.715573] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ca6b327a-4e4e-486e-a0f1-74ffee8f5abc tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.207s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.767142] env[62503]: DEBUG nova.compute.manager [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Received event network-vif-plugged-71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 878.767142] env[62503]: DEBUG oslo_concurrency.lockutils [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service nova] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.767142] env[62503]: DEBUG oslo_concurrency.lockutils [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service nova] Lock "e693bcc2-3883-466d-913c-831146ca81e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.767142] env[62503]: DEBUG oslo_concurrency.lockutils [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service nova] Lock "e693bcc2-3883-466d-913c-831146ca81e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.767142] env[62503]: DEBUG nova.compute.manager [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service 
nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] No waiting events found dispatching network-vif-plugged-71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 878.767142] env[62503]: WARNING nova.compute.manager [req-f10e6613-8854-478b-9b19-48288f7c68ea req-7fc52f31-0c4b-4022-b7dc-a631ed6fd64c service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Received unexpected event network-vif-plugged-71aa781d-4a40-4f00-8fb8-06cb4c73986a for instance with vm_state building and task_state spawning. [ 878.820929] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5276ac6e-7acd-cb74-17fe-3cf60b3c986b, 'name': SearchDatastore_Task, 'duration_secs': 0.150438} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.821592] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.822266] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.822489] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.822751] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.823567] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.823702] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d24ddd03-d9b8-4cf9-a30b-ce1cd66b1c7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.834011] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.834324] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.835644] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23e76bc0-b101-4fe6-9215-11919b439ac7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.841145] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 878.841145] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52033eae-861c-bcc4-62fd-9a907af9664f" [ 878.841145] env[62503]: _type = "Task" [ 878.841145] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.853859] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52033eae-861c-bcc4-62fd-9a907af9664f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.947475] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 879.120245] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08926274-490b-4b73-a0d8-d2e16397c943 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.132270] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8243444-ba99-4a23-a726-81f7cf91c98c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.185383] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.187844] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Successfully updated port: 71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.188518] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed909e1f-1096-4a35-8562-36eed96b4d9c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.202234] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6793ed6b-5794-46af-b31d-f9c27d8c8c4b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.219724] env[62503]: DEBUG nova.compute.provider_tree [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.288494] env[62503]: DEBUG nova.compute.manager [req-15ca15cc-f8ad-46c5-a320-fc2314cfcb2c req-e869a301-573e-4121-98c3-9c250bb78519 service nova] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Received event network-vif-deleted-e4a6a500-8bcd-461e-8408-055dbdd07fe1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 879.353370] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52033eae-861c-bcc4-62fd-9a907af9664f, 'name': SearchDatastore_Task, 'duration_secs': 0.011454} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.354286] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d71d684-37b4-42c6-8cd1-43485536e9cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.362184] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 879.362184] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52648371-efb0-16fb-8e5c-8c350c567cd4" [ 879.362184] env[62503]: _type = "Task" [ 879.362184] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.372350] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52648371-efb0-16fb-8e5c-8c350c567cd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.461814] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Successfully created port: 792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.697350] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.697350] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.697350] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.724192] env[62503]: DEBUG nova.scheduler.client.report [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 879.880937] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52648371-efb0-16fb-8e5c-8c350c567cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.019761} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.881300] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.881756] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] b6fddb0d-70f5-433f-a0ef-0d6bffb35579/b6fddb0d-70f5-433f-a0ef-0d6bffb35579.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.883031] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-619a614b-1fb3-447e-8742-dbf03afa81af {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.891825] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 879.891825] env[62503]: value = "task-1387820" [ 879.891825] env[62503]: _type = "Task" [ 879.891825] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.901458] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387820, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.928060] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "ca1f1966-bfe1-495e-b055-f72150f72470" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.928391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.965523] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 880.002170] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.002460] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.002672] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.002889] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.003033] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af 
tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.003192] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.003399] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.003548] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.003721] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.003888] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.004073] env[62503]: DEBUG nova.virt.hardware [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.005208] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fc23cc-3cc3-44af-bd18-fa7eba142aab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.014666] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895745a0-da08-4d48-8e15-0dc35af63ed7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.230348] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.232852] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 
16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 880.237982] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.492s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.259807] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.358163] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 880.359100] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf724e3-3b28-483f-aa0c-0bf5e2365346 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.373021] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk is in state: ready. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 880.373275] env[62503]: ERROR oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk due to incomplete transfer. [ 880.373591] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6a48ddfe-39cb-439b-ab60-3e0f0de2d7a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.386694] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c99989-1aa6-c782-b1f4-9bc32e794a53/disk-0.vmdk. 
{{(pid=62503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 880.386977] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Uploaded image 3306da0e-1d43-4f11-be11-5fe7cf1194eb to the Glance image server {{(pid=62503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 880.389654] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Destroying the VM {{(pid=62503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 880.390668] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9f78d83a-4db8-4319-b43c-e92dd511af97 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.399378] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 880.399378] env[62503]: value = "task-1387821" [ 880.399378] env[62503]: _type = "Task" [ 880.399378] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.403822] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387820, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.415313] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387821, 'name': Destroy_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.430546] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 880.578875] env[62503]: DEBUG nova.network.neutron [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [{"id": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "address": "fa:16:3e:24:a7:1c", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71aa781d-4a", "ovs_interfaceid": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.741607] env[62503]: DEBUG nova.compute.utils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.741607] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 880.742365] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 880.840725] env[62503]: DEBUG nova.policy [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 880.847353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "cf611345-d276-4745-a2f8-0551c9dca2c2" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.847625] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.848856] env[62503]: INFO nova.compute.manager [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Rebooting instance [ 880.853533] env[62503]: DEBUG nova.compute.manager [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Received event network-changed-71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 880.853533] env[62503]: DEBUG nova.compute.manager [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Refreshing instance network info cache due to event network-changed-71aa781d-4a40-4f00-8fb8-06cb4c73986a. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 880.853533] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Acquiring lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.903989] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387820, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795597} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.907729] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] b6fddb0d-70f5-433f-a0ef-0d6bffb35579/b6fddb0d-70f5-433f-a0ef-0d6bffb35579.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.908270] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.908523] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e8c6d88-7784-4934-a951-536be61017e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.916610] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387821, 'name': Destroy_Task} progress is 33%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.917974] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 880.917974] env[62503]: value = "task-1387822" [ 880.917974] env[62503]: _type = "Task" [ 880.917974] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.927501] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387822, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.966472] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.082426] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.082426] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Instance network_info: |[{"id": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "address": "fa:16:3e:24:a7:1c", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71aa781d-4a", "ovs_interfaceid": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 881.082426] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Acquired lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.082626] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Refreshing network info cache for port 71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.083764] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:a7:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71aa781d-4a40-4f00-8fb8-06cb4c73986a', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.092329] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating folder: Project (12a42517cf8f4ad3836f2f95e8833dd4). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.092329] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-899358db-9615-4d1e-8927-b7be23fdb842 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.103086] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created folder: Project (12a42517cf8f4ad3836f2f95e8833dd4) in parent group-v294540. [ 881.103086] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating folder: Instances. Parent ref: group-v294583. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.103206] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e74fcf60-cf6c-457e-9663-0d76aa0d96b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.112154] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created folder: Instances in parent group-v294583. [ 881.112352] env[62503]: DEBUG oslo.service.loopingcall [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.112592] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.112801] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-590a8990-29e3-4eff-9f7f-a7af2a313bbf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.131353] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.131353] env[62503]: value = "task-1387825" [ 881.131353] env[62503]: _type = "Task" [ 881.131353] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.139226] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387825, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.225183] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Successfully created port: 632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.248976] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 881.276110] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 9ccdc727-536e-4db8-bad4-960858254758 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.277170] env[62503]: WARNING nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 6229dda6-90e8-457b-beb3-2107e3700b29 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 881.277576] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance cf611345-d276-4745-a2f8-0551c9dca2c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.278492] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.278665] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance e693bcc2-3883-466d-913c-831146ca81e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.278665] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c9129f68-c755-4b78-b067-b77b01048c02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.278793] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 16167e53-e45b-4b37-90c6-ab2c30ebf1aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 881.383110] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.383676] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquired lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.383676] env[62503]: DEBUG nova.network.neutron [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.413781] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387821, 'name': Destroy_Task, 'duration_secs': 0.592298} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.415904] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Destroyed the VM [ 881.415904] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Deleting Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 881.415904] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-51fcc293-47c2-445c-a903-0dcab14d8072 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.423064] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 881.423064] env[62503]: value = "task-1387826" [ 881.423064] env[62503]: _type = "Task" [ 881.423064] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.431487] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07368} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.431950] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.432864] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364e6122-fd3f-4d53-b58e-1fe4fc0b7885 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.438778] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387826, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.461706] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] b6fddb0d-70f5-433f-a0ef-0d6bffb35579/b6fddb0d-70f5-433f-a0ef-0d6bffb35579.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.462237] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce272fba-3901-4592-8f1f-f41ca1b42c7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.482751] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 881.482751] env[62503]: value = "task-1387827" [ 881.482751] env[62503]: _type = "Task" [ 881.482751] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.492466] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387827, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.644936] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387825, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.783916] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ca1f1966-bfe1-495e-b055-f72150f72470 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 881.783916] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 881.783916] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 881.891848] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Successfully updated port: 792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.946845] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.947217] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.954331] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updated VIF entry in instance network info cache for port 71aa781d-4a40-4f00-8fb8-06cb4c73986a. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.954773] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [{"id": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "address": "fa:16:3e:24:a7:1c", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71aa781d-4a", "ovs_interfaceid": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.956735] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387826, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.964012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.964196] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.995508] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387827, 'name': ReconfigVM_Task, 'duration_secs': 0.388223} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.998364] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Reconfigured VM instance instance-00000047 to attach disk [datastore1] b6fddb0d-70f5-433f-a0ef-0d6bffb35579/b6fddb0d-70f5-433f-a0ef-0d6bffb35579.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.999539] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ebc1195-0777-4486-9704-6e8738f26278 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.005896] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 882.005896] env[62503]: value = "task-1387828" [ 882.005896] env[62503]: _type = "Task" [ 882.005896] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.014953] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387828, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.020231] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864fc087-6042-45e5-9d8c-c1854e3c9487 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.026851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7546c864-f6fe-4cd2-b0a8-406f48a2851c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.066284] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76707181-e62d-4d60-99e4-e78199320909 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.075162] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ac0ee6-ae25-41cd-b8ab-a622dc71372c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.089636] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.142956] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387825, 'name': CreateVM_Task, 'duration_secs': 0.596023} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.143320] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.144262] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.144498] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.144870] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.145384] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f109e6-01a1-46f5-b35f-80a8f4a3da4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.152470] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 882.152470] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527feff2-c6df-5877-973f-fa1d69c40a4c" [ 882.152470] env[62503]: _type = "Task" [ 882.152470] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.160801] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527feff2-c6df-5877-973f-fa1d69c40a4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.209278] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.209898] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.252042] env[62503]: DEBUG nova.network.neutron [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.268369] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 882.294896] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.295189] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.295362] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.295620] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.295688] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.295835] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.296202] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.296279] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.296460] env[62503]: DEBUG nova.virt.hardware [None 
req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.296625] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.296802] env[62503]: DEBUG nova.virt.hardware [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.297692] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2b9f59-1725-41af-bb5b-4e069843c1e7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.309455] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916cca67-10c2-4a12-b506-1990a195b366 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.401143] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.401307] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.401455] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.434907] env[62503]: DEBUG oslo_vmware.api [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387826, 'name': RemoveSnapshot_Task, 'duration_secs': 0.699186} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.435231] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Deleted Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 882.435475] env[62503]: INFO nova.compute.manager [None req-2ba47ee0-3aae-44b8-a61f-079e3e8e4a13 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Took 14.31 seconds to snapshot the instance on the hypervisor. [ 882.450600] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 882.459193] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Releasing lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.459343] env[62503]: DEBUG nova.compute.manager [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 882.459516] env[62503]: DEBUG nova.compute.manager [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing instance network info cache due to event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 882.459704] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Acquiring lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.468489] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 882.520686] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387828, 'name': Rename_Task, 'duration_secs': 0.162713} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.520686] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.520901] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3eedc95-ab06-4809-9d85-f9bd460b3073 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.528259] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 882.528259] env[62503]: value = "task-1387829" [ 882.528259] env[62503]: _type = "Task" [ 882.528259] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.536443] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.596561] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 882.663084] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527feff2-c6df-5877-973f-fa1d69c40a4c, 'name': SearchDatastore_Task, 'duration_secs': 0.021723} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.663766] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.663766] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.664968] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.664968] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.664968] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.664968] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2422606-0f18-4895-bbce-bd981e74368d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.673949] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.674161] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.674894] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7317d76c-8da5-4056-b95c-3e03a0507c1c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.680358] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 882.680358] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522ec0f5-e408-b234-e1c9-9889ae605ee4" [ 882.680358] env[62503]: _type = "Task" [ 882.680358] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.689657] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522ec0f5-e408-b234-e1c9-9889ae605ee4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.712752] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 882.728869] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "c09488ed-e354-4abf-8999-b2f8afec44fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.728869] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.754321] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Releasing lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.756632] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Acquired lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.756919] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing network info cache for 
port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.759869] env[62503]: DEBUG nova.compute.manager [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 882.762049] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c796367a-66ec-47bb-a742-52e26a99e41d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.890434] env[62503]: DEBUG nova.compute.manager [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Received event network-vif-plugged-792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 882.890434] env[62503]: DEBUG oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.890434] env[62503]: DEBUG oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.890434] env[62503]: DEBUG oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.890434] env[62503]: DEBUG nova.compute.manager [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] No waiting events found dispatching network-vif-plugged-792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 882.890434] env[62503]: WARNING nova.compute.manager [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Received unexpected event network-vif-plugged-792806f8-f6ea-4abd-9085-2a2ce83df26b for instance with vm_state building and task_state spawning. 
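Editor's note: the "Acquiring lock ... / Lock ... acquired by ... / released" DEBUG lines throughout this section (including the per-instance "-events" lock just above) are emitted by oslo.concurrency's lockutils. A minimal sketch of the two call styles that produce such lines follows; the lock names are placeholders, not the exact names nova uses.

    # Minimal sketch of oslo.concurrency locking; illustrative only,
    # with placeholder lock names rather than nova's exact wrappers.
    from oslo_concurrency import lockutils

    # Context-manager form, e.g. around a network info cache refresh:
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # critical section: only one worker rebuilds this cache entry

    # Decorator form, e.g. a per-instance build lock:
    @lockutils.synchronized('<instance-uuid>')
    def _locked_do_build_and_run_instance():
        pass

    _locked_do_build_and_run_instance()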
[ 882.890434] env[62503]: DEBUG nova.compute.manager [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Received event network-changed-792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 882.890434] env[62503]: DEBUG nova.compute.manager [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Refreshing instance network info cache due to event network-changed-792806f8-f6ea-4abd-9085-2a2ce83df26b. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 882.890434] env[62503]: DEBUG oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Acquiring lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.896486] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.896960] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.966879] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.978787] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.992564] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.037649] env[62503]: DEBUG oslo_vmware.api [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387829, 'name': PowerOnVM_Task, 'duration_secs': 0.482147} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.037947] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.038150] env[62503]: INFO nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Took 9.34 seconds to spawn the instance on the hypervisor. [ 883.038360] env[62503]: DEBUG nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 883.039118] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04294f1c-5c6b-414a-8ca0-46ae66a86f96 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.103539] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 883.103755] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.866s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.104063] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.919s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.104262] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.106729] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.140s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.108185] env[62503]: INFO nova.compute.claims [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.127484] env[62503]: INFO nova.scheduler.client.report [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Deleted allocations for instance 6229dda6-90e8-457b-beb3-2107e3700b29 [ 883.189244] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Successfully updated port: 632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.196596] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522ec0f5-e408-b234-e1c9-9889ae605ee4, 'name': SearchDatastore_Task, 'duration_secs': 0.015005} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.197432] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8b2c3ca-a3a4-4b18-8a3e-335677d9787e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.203046] env[62503]: DEBUG nova.network.neutron [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.203904] env[62503]: WARNING oslo_messaging._drivers.amqpdriver [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 883.206408] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 883.206408] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526d1bc5-1d60-4212-beda-ab3249141ae4" [ 883.206408] env[62503]: _type = "Task" [ 883.206408] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.214418] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526d1bc5-1d60-4212-beda-ab3249141ae4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.230063] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.232606] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 883.402561] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 883.523249] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updated VIF entry in instance network info cache for port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.523627] env[62503]: DEBUG nova.network.neutron [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.560973] env[62503]: INFO nova.compute.manager [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Took 29.10 seconds to build instance. 
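Editor's note: the "Updating instance_info_cache with network_info" entries above carry the per-instance VIF model that nova caches. A small illustrative snippet pulling the commonly used fields out of an entry shaped like the one just logged (keys and values copied from that log line, abbreviated):

    # Illustrative only: reading the cached VIF entry logged above.
    vif = {
        "id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea",
        "address": "fa:16:3e:b1:7b:26",
        "devname": "tap6a859ef4-58",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.4", "type": "fixed"}],
            }],
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)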
[ 883.635625] env[62503]: DEBUG oslo_concurrency.lockutils [None req-65c4b598-043d-4ffb-9049-2bd83f4e3fd6 tempest-ServersTestMultiNic-457261078 tempest-ServersTestMultiNic-457261078-project-member] Lock "6229dda6-90e8-457b-beb3-2107e3700b29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.100s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.692570] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.692570] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.692570] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.705280] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.705810] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Instance network_info: |[{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 883.709019] env[62503]: DEBUG 
oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Acquired lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.709019] env[62503]: DEBUG nova.network.neutron [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Refreshing network info cache for port 792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.711330] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:08:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '792806f8-f6ea-4abd-9085-2a2ce83df26b', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.722349] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Creating folder: Project (0849093c8b48400a8e9d56171ea99e8f). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.723422] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0196160-a62c-46d8-8437-72ef96c95b5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.739139] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526d1bc5-1d60-4212-beda-ab3249141ae4, 'name': SearchDatastore_Task, 'duration_secs': 0.039564} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.743967] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.743967] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] e693bcc2-3883-466d-913c-831146ca81e7/e693bcc2-3883-466d-913c-831146ca81e7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.743967] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Created folder: Project (0849093c8b48400a8e9d56171ea99e8f) in parent group-v294540. [ 883.744268] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Creating folder: Instances. Parent ref: group-v294586. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.744932] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e29465bb-0d2b-4f1d-895c-59f39f160d71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.746688] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75487352-5967-4695-baea-afe70f0d3058 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.754503] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 883.754503] env[62503]: value = "task-1387831" [ 883.754503] env[62503]: _type = "Task" [ 883.754503] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.755593] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.761684] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Created folder: Instances in parent group-v294586. 
[ 883.761877] env[62503]: DEBUG oslo.service.loopingcall [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.762448] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.763642] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50594695-8827-4156-a1f6-27acd93b6acc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.788068] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.789151] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06d4b91-e36c-44f9-930c-6b0ed97d9ca8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.796918] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Doing hard reboot of VM {{(pid=62503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 883.798261] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-a0ad69d1-ed3b-41e4-955d-e49a557b8cd5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.799693] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.799693] env[62503]: value = "task-1387833" [ 883.799693] env[62503]: _type = "Task" [ 883.799693] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.804108] env[62503]: DEBUG oslo_vmware.api [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 883.804108] env[62503]: value = "task-1387834" [ 883.804108] env[62503]: _type = "Task" [ 883.804108] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.810284] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387833, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.815029] env[62503]: DEBUG oslo_vmware.api [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387834, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.924608] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.028184] env[62503]: DEBUG oslo_concurrency.lockutils [req-d9c2d5a2-0f20-488b-bac8-ec251d686e19 req-00129a1f-b23e-47ef-9e51-6619c1fc11ac service nova] Releasing lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.063309] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fc728101-2fc5-4ffa-a37b-1d922196d83a tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.131s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.256365] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.271923] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387831, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.312321] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387833, 'name': CreateVM_Task, 'duration_secs': 0.440972} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.313111] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.313611] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.313783] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.314146] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.317636] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b638037f-d3f4-4733-b5b9-06d5da10d6e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.319328] env[62503]: DEBUG oslo_vmware.api [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387834, 'name': ResetVM_Task, 'duration_secs': 0.095272} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.319899] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da90854-c43d-4015-96b9-2ecf718ea372 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.324083] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Did hard reboot of VM {{(pid=62503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 884.324381] env[62503]: DEBUG nova.compute.manager [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 884.325410] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdb7cc3-5760-482c-b488-3f495090076c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.329343] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 884.329343] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cb2f75-a3d6-e1ee-4af4-64e92c891020" [ 884.329343] env[62503]: _type = "Task" [ 884.329343] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.337854] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a18ea5c-11b8-46db-be19-4c12f0509ced {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.353373] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cb2f75-a3d6-e1ee-4af4-64e92c891020, 'name': SearchDatastore_Task, 'duration_secs': 0.01124} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.379636] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.380738] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.381083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.381566] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.381796] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.386720] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cd2383b-16a5-44d4-b24f-76b5ddf58983 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.389151] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70de726-ead1-44d3-95b3-26123e29168e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.398294] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449238a8-a4ef-44a4-801a-ae2136b70d2b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.403155] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.403362] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.404645] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03b3020d-e996-4edf-a796-ac03b2ca3f01 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.414625] env[62503]: DEBUG nova.compute.provider_tree [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.418587] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 884.418587] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ea335f-f878-d9cc-35c0-e91f85cae056" [ 884.418587] env[62503]: _type = "Task" [ 884.418587] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.426898] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ea335f-f878-d9cc-35c0-e91f85cae056, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.516887] env[62503]: DEBUG nova.network.neutron [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updating instance_info_cache with network_info: [{"id": "632bec04-d9b9-4178-bb3c-104caa5ee159", "address": "fa:16:3e:f5:45:70", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632bec04-d9", "ovs_interfaceid": "632bec04-d9b9-4178-bb3c-104caa5ee159", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.561160] env[62503]: DEBUG nova.network.neutron [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updated VIF entry in instance network 
info cache for port 792806f8-f6ea-4abd-9085-2a2ce83df26b. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.561418] env[62503]: DEBUG nova.network.neutron [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.663169] env[62503]: DEBUG nova.compute.manager [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 884.663169] env[62503]: DEBUG nova.compute.manager [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing instance network info cache due to event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 884.663169] env[62503]: DEBUG oslo_concurrency.lockutils [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.663464] env[62503]: DEBUG oslo_concurrency.lockutils [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.663464] env[62503]: DEBUG nova.network.neutron [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.767409] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581251} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.768214] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] e693bcc2-3883-466d-913c-831146ca81e7/e693bcc2-3883-466d-913c-831146ca81e7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.768214] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.768214] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67c4f41b-2137-4b25-b6ef-37f6f2b1da84 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.775553] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 884.775553] env[62503]: value = "task-1387835" [ 884.775553] env[62503]: _type = "Task" [ 884.775553] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.784275] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387835, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.850905] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ff3bdbcf-6ae4-49c9-ba46-53b2b0c34193 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.003s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.919037] env[62503]: DEBUG nova.scheduler.client.report [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 884.924404] env[62503]: DEBUG nova.compute.manager [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received event network-vif-plugged-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 884.924629] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.924864] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.925061] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.925241] env[62503]: DEBUG nova.compute.manager [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] No waiting events found dispatching network-vif-plugged-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 884.925414] env[62503]: WARNING nova.compute.manager [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received unexpected event network-vif-plugged-632bec04-d9b9-4178-bb3c-104caa5ee159 for instance with vm_state 
building and task_state spawning. [ 884.925574] env[62503]: DEBUG nova.compute.manager [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received event network-changed-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 884.925732] env[62503]: DEBUG nova.compute.manager [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Refreshing instance network info cache due to event network-changed-632bec04-d9b9-4178-bb3c-104caa5ee159. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 884.925901] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Acquiring lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.939838] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ea335f-f878-d9cc-35c0-e91f85cae056, 'name': SearchDatastore_Task, 'duration_secs': 0.045594} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.940893] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf69c91-ceaa-4f3c-9361-bc6fe105326c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.946053] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 884.946053] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526eae00-330b-d8b0-ba2c-7f0bd767b236" [ 884.946053] env[62503]: _type = "Task" [ 884.946053] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.953857] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526eae00-330b-d8b0-ba2c-7f0bd767b236, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.020896] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.021387] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance network_info: |[{"id": "632bec04-d9b9-4178-bb3c-104caa5ee159", "address": "fa:16:3e:f5:45:70", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632bec04-d9", "ovs_interfaceid": "632bec04-d9b9-4178-bb3c-104caa5ee159", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 885.021802] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Acquired lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.022113] env[62503]: DEBUG nova.network.neutron [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Refreshing network info cache for port 632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.024224] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:45:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '632bec04-d9b9-4178-bb3c-104caa5ee159', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 885.039187] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] 
Creating folder: Project (f823912f7b1a4998a6dbc22060cf6c5e). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 885.040513] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-745b484d-74e3-4a57-8b6c-bae7426ff6ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.051997] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created folder: Project (f823912f7b1a4998a6dbc22060cf6c5e) in parent group-v294540. [ 885.052349] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Creating folder: Instances. Parent ref: group-v294589. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 885.052697] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47e48455-4354-4ad6-923a-a28b2bf778ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.065420] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created folder: Instances in parent group-v294589. [ 885.065420] env[62503]: DEBUG oslo.service.loopingcall [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.065420] env[62503]: DEBUG oslo_concurrency.lockutils [req-fb96ea9c-23a8-49ac-bd36-a0ba556b9035 req-4eb540c5-1fca-45ce-a9ba-9b36a3ce15fc service nova] Releasing lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.065420] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 885.065420] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a889ae3f-b37e-416d-be49-641be46b99ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.093574] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.093574] env[62503]: value = "task-1387838" [ 885.093574] env[62503]: _type = "Task" [ 885.093574] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.103213] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387838, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.290340] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387835, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062108} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.290340] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.291089] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141f7055-1fa8-4694-930f-44bc4d8fbf72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.314095] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] e693bcc2-3883-466d-913c-831146ca81e7/e693bcc2-3883-466d-913c-831146ca81e7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.316822] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b1dc54c-bde5-4ce3-ba95-f5264f704348 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.335953] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 885.335953] env[62503]: value = "task-1387839" [ 885.335953] env[62503]: _type = "Task" [ 885.335953] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.343836] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387839, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.424728] env[62503]: DEBUG nova.network.neutron [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updated VIF entry in instance network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.425382] env[62503]: DEBUG nova.network.neutron [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.427431] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.428063] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 885.430792] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.452s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.437872] env[62503]: INFO nova.compute.claims [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.457181] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526eae00-330b-d8b0-ba2c-7f0bd767b236, 'name': SearchDatastore_Task, 'duration_secs': 0.06842} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.458304] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.458643] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.458929] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-385a0e92-2ae3-4101-a960-39a720ca3a13 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.465707] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 885.465707] env[62503]: value = "task-1387840" [ 885.465707] env[62503]: _type = "Task" [ 885.465707] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.474432] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.603355] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387838, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.847322] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387839, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.932630] env[62503]: DEBUG oslo_concurrency.lockutils [req-10b426a2-981b-4b76-9625-987930deb218 req-6b6950bf-11b9-48fb-98e1-661e0b8cca2a service nova] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.940613] env[62503]: DEBUG nova.compute.utils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 885.947168] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 885.950349] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 885.978287] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387840, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.049656] env[62503]: DEBUG nova.policy [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 886.104876] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387838, 'name': CreateVM_Task, 'duration_secs': 0.614696} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.104876] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.105759] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.105954] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.106263] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.106944] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b2b755-6db9-4e1f-aaef-77d5ff4171f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.111695] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 886.111695] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523c71c9-7fdf-82cd-2305-4de6696d6732" [ 886.111695] env[62503]: _type = "Task" [ 886.111695] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.122145] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523c71c9-7fdf-82cd-2305-4de6696d6732, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.209022] env[62503]: DEBUG nova.network.neutron [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updated VIF entry in instance network info cache for port 632bec04-d9b9-4178-bb3c-104caa5ee159. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.209022] env[62503]: DEBUG nova.network.neutron [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updating instance_info_cache with network_info: [{"id": "632bec04-d9b9-4178-bb3c-104caa5ee159", "address": "fa:16:3e:f5:45:70", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632bec04-d9", "ovs_interfaceid": "632bec04-d9b9-4178-bb3c-104caa5ee159", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.346583] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387839, 'name': ReconfigVM_Task, 'duration_secs': 0.533417} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.346858] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfigured VM instance instance-00000048 to attach disk [datastore2] e693bcc2-3883-466d-913c-831146ca81e7/e693bcc2-3883-466d-913c-831146ca81e7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.347819] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e617480-b148-47b2-8251-fa6331fd5f4b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.357074] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 886.357074] env[62503]: value = "task-1387841" [ 886.357074] env[62503]: _type = "Task" [ 886.357074] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.367770] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387841, 'name': Rename_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.448140] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 886.482611] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387840, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.547234] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Successfully created port: 02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.623933] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523c71c9-7fdf-82cd-2305-4de6696d6732, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.624581] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.624826] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.625086] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.625257] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.625443] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 
tempest-DeleteServersTestJSON-877433316-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.625673] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02bd49d9-246f-4fb0-879f-0dc06056b122 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.639966] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fc6338-d8e4-4082-a6d9-a85671ee19b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.643587] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.643765] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.644780] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73853465-c1d6-40b6-9e36-fa3bd5b12654 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.649739] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8575b2-de48-4ce1-aa81-3a771945d55c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.654745] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 886.654745] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ae0ff4-badc-ee13-f3cc-f347cc46fcfa" [ 886.654745] env[62503]: _type = "Task" [ 886.654745] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.685794] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc5c3cd-8661-4af8-8a97-996459e322d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.691488] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ae0ff4-badc-ee13-f3cc-f347cc46fcfa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.696673] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd57ab0-6598-41ff-bf70-0c20d8d1295f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.709350] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa7cc2b0-6ed9-4185-9053-63e66524f0c9 req-7c5d5263-01d5-4eee-a353-2cbf0db9465f service nova] Releasing lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.709959] env[62503]: DEBUG nova.compute.provider_tree [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.866109] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "cf611345-d276-4745-a2f8-0551c9dca2c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.866368] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.866583] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.866764] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.866938] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.868586] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 
tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387841, 'name': Rename_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.869158] env[62503]: INFO nova.compute.manager [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Terminating instance [ 886.870877] env[62503]: DEBUG nova.compute.manager [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 886.871083] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 886.871879] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61339cb5-2bf5-4b74-84e1-1743ed1bffb8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.879139] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.879480] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-115b78ac-3187-4ce6-bff1-ebc50a548940 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.886545] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 886.886545] env[62503]: value = "task-1387842" [ 886.886545] env[62503]: _type = "Task" [ 886.886545] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.894293] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387842, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.958936] env[62503]: DEBUG nova.compute.manager [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 886.959194] env[62503]: DEBUG nova.compute.manager [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing instance network info cache due to event network-changed-6a859ef4-58a2-46ba-9e2e-b0857df9cfea. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 886.959469] env[62503]: DEBUG oslo_concurrency.lockutils [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] Acquiring lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.959654] env[62503]: DEBUG oslo_concurrency.lockutils [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] Acquired lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.959846] env[62503]: DEBUG nova.network.neutron [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Refreshing network info cache for port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.979083] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387840, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.482794} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.979434] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.979696] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.980242] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38a86068-2482-4690-b208-a81ac6d1a5b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.987816] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 886.987816] env[62503]: value = "task-1387843" [ 886.987816] env[62503]: _type = "Task" [ 886.987816] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.996017] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387843, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.169559] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ae0ff4-badc-ee13-f3cc-f347cc46fcfa, 'name': SearchDatastore_Task, 'duration_secs': 0.038904} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.170749] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4ba5e06-b059-4af6-ba34-5de44ac4f36f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.177393] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 887.177393] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee0aaf-8280-3fca-6597-cfba69ea44cd" [ 887.177393] env[62503]: _type = "Task" [ 887.177393] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.188423] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee0aaf-8280-3fca-6597-cfba69ea44cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.212904] env[62503]: DEBUG nova.scheduler.client.report [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 887.367451] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387841, 'name': Rename_Task, 'duration_secs': 0.784936} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.367780] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.368167] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e683ed4a-b731-4260-a4cf-555d1d6cd8ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.375785] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 887.375785] env[62503]: value = "task-1387844" [ 887.375785] env[62503]: _type = "Task" [ 887.375785] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.386430] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.397663] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387842, 'name': PowerOffVM_Task, 'duration_secs': 0.261423} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.397917] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.398120] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 887.398394] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea14d11e-b44c-492f-b63e-9d781c5c0bdc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.458698] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 887.458936] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 887.459137] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Deleting the datastore file [datastore1] cf611345-d276-4745-a2f8-0551c9dca2c2 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.459404] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7945a502-3168-4c8d-8586-41ea4dc4dc26 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.466632] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 887.474895] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for the task: (returnval){ [ 887.474895] env[62503]: value = "task-1387846" [ 887.474895] env[62503]: _type = "Task" [ 887.474895] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.486648] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.498359] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387843, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128818} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.500843] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.501090] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.501285] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.501487] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.501663] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.501819] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.502146] env[62503]: DEBUG nova.virt.hardware [None 
req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.502387] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.502589] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.502774] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.502967] env[62503]: DEBUG nova.virt.hardware [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.503492] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.504060] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa42a65e-c2b4-41a1-87ee-266fa85507fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.508823] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19066544-423d-4b73-8c0f-30415959efdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.525257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db140050-c536-4f29-a69c-553938a7f07a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.538539] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.538539] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-001e2cac-9bc6-4428-b0bd-0bb67c12334b {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.565884] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 887.565884] env[62503]: value = "task-1387847" [ 887.565884] env[62503]: _type = "Task" [ 887.565884] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.575454] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387847, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.686609] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ee0aaf-8280-3fca-6597-cfba69ea44cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010355} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.687393] env[62503]: DEBUG nova.network.neutron [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updated VIF entry in instance network info cache for port 6a859ef4-58a2-46ba-9e2e-b0857df9cfea. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.687717] env[62503]: DEBUG nova.network.neutron [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [{"id": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "address": "fa:16:3e:b1:7b:26", "network": {"id": "f6605088-4934-47d3-a3a3-7d24821eaddf", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1250737836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d404225d6f9c46148e0b7080ec1eee99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a859ef4-58", "ovs_interfaceid": "6a859ef4-58a2-46ba-9e2e-b0857df9cfea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.689067] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.689160] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 16167e53-e45b-4b37-90c6-ab2c30ebf1aa/16167e53-e45b-4b37-90c6-ab2c30ebf1aa.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 887.689395] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ca0e373-c8d2-44c8-a0f8-3495671c8359 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.698092] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 887.698092] env[62503]: value = "task-1387848" [ 887.698092] env[62503]: _type = "Task" [ 887.698092] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.704977] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387848, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.720537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.721036] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 887.724187] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.732s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.725720] env[62503]: INFO nova.compute.claims [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.888696] env[62503]: DEBUG oslo_vmware.api [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387844, 'name': PowerOnVM_Task, 'duration_secs': 0.445467} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.889109] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.889467] env[62503]: INFO nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Took 10.16 seconds to spawn the instance on the hypervisor. [ 887.889689] env[62503]: DEBUG nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 887.890705] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347f18d9-695f-446c-9390-700848a278b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.987678] env[62503]: DEBUG oslo_vmware.api [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Task: {'id': task-1387846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160814} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.988111] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 887.988339] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 887.988429] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 887.988678] env[62503]: INFO nova.compute.manager [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 887.989220] env[62503]: DEBUG oslo.service.loopingcall [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.989503] env[62503]: DEBUG nova.compute.manager [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 887.989627] env[62503]: DEBUG nova.network.neutron [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.078488] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387847, 'name': ReconfigVM_Task, 'duration_secs': 0.410044} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.078847] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfigured VM instance instance-00000049 to attach disk [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.079568] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83df1a33-3e13-48c1-be3c-deffff0e274c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.086658] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 888.086658] env[62503]: value = "task-1387849" [ 888.086658] env[62503]: _type = "Task" [ 888.086658] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.096322] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387849, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.193541] env[62503]: DEBUG oslo_concurrency.lockutils [req-e75a3890-bd0c-4ae2-98c6-e4d024f5edd9 req-6ebc8921-6e71-4b8e-bb99-2b08864325c5 service nova] Releasing lock "refresh_cache-cf611345-d276-4745-a2f8-0551c9dca2c2" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.206465] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387848, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501671} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.206736] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 16167e53-e45b-4b37-90c6-ab2c30ebf1aa/16167e53-e45b-4b37-90c6-ab2c30ebf1aa.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.206958] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.207233] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fc76a04-e78e-4d48-b2b5-71587d5d38a8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.213429] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 888.213429] env[62503]: value = "task-1387850" [ 888.213429] env[62503]: _type = "Task" [ 888.213429] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.221805] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.225102] env[62503]: DEBUG nova.compute.utils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.226449] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 888.226623] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.275578] env[62503]: DEBUG nova.policy [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbe8eb64be214c2d856e3f4576edbf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac2608fc52a497f961d018c888a826f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 888.409915] env[62503]: INFO nova.compute.manager [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Took 26.07 seconds to build instance. [ 888.549579] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Successfully created port: 269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.602744] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387849, 'name': Rename_Task, 'duration_secs': 0.198503} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.603720] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.604057] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4434c29b-cf13-45d4-988d-c53de362b2a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.611171] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 888.611171] env[62503]: value = "task-1387851" [ 888.611171] env[62503]: _type = "Task" [ 888.611171] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.619124] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387851, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.677015] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Successfully updated port: 02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.723414] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061971} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.723695] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.724543] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae277159-0be7-46ec-8e6c-38d15d4f205d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.740779] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 888.758868] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 16167e53-e45b-4b37-90c6-ab2c30ebf1aa/16167e53-e45b-4b37-90c6-ab2c30ebf1aa.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.760051] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cee4ac57-ffec-42ac-b93b-d86cdb946459 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.785279] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 888.785279] env[62503]: value = "task-1387852" [ 888.785279] env[62503]: _type = "Task" [ 888.785279] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.796424] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387852, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.799917] env[62503]: DEBUG nova.network.neutron [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.913212] env[62503]: DEBUG oslo_concurrency.lockutils [None req-157c52a0-86d9-4396-8baf-ed1051ee6632 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.066s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.923492] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cfa149-c408-48be-b60a-787c8da43c08 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.931771] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca9d0ce-44e9-425e-bbad-49697c07adf6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.962255] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dbf908-97e8-40a5-b367-262f446eded8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.970330] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2c8c61-e26d-4fea-a9cf-6be8277648e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.983602] env[62503]: DEBUG nova.compute.provider_tree [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.986944] env[62503]: DEBUG nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Received event network-vif-deleted-6a859ef4-58a2-46ba-9e2e-b0857df9cfea {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 888.987160] env[62503]: DEBUG nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Received event network-vif-plugged-02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 888.987549] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Acquiring 
lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.987627] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.987793] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.989113] env[62503]: DEBUG nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] No waiting events found dispatching network-vif-plugged-02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 888.989113] env[62503]: WARNING nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Received unexpected event network-vif-plugged-02a00a1f-6723-41e6-b91a-5e79048551ce for instance with vm_state building and task_state spawning. [ 888.989113] env[62503]: DEBUG nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Received event network-changed-02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 888.989113] env[62503]: DEBUG nova.compute.manager [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Refreshing instance network info cache due to event network-changed-02a00a1f-6723-41e6-b91a-5e79048551ce. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 888.989113] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Acquiring lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.989566] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Acquired lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.989566] env[62503]: DEBUG nova.network.neutron [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Refreshing network info cache for port 02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.122909] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387851, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.183114] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.296476] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387852, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.302156] env[62503]: INFO nova.compute.manager [-] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Took 1.31 seconds to deallocate network for instance. [ 889.494952] env[62503]: DEBUG nova.scheduler.client.report [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 889.568043] env[62503]: DEBUG nova.network.neutron [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.622114] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387851, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.735795] env[62503]: DEBUG nova.network.neutron [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.767720] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 889.798896] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387852, 'name': ReconfigVM_Task, 'duration_secs': 0.85686} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.801054] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.801315] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.801496] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.801674] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor pref 0:0:0 {{(pid=62503) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.801822] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.801971] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.802204] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.802369] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.802544] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.802711] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.802883] env[62503]: DEBUG nova.virt.hardware [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.803198] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 16167e53-e45b-4b37-90c6-ab2c30ebf1aa/16167e53-e45b-4b37-90c6-ab2c30ebf1aa.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.804263] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07f318a-1b2e-4c51-be12-e72f0d5f2789 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.806606] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29a2ded2-ad50-4335-9dd0-1be28ea33c04 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.808570] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.815617] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0058ebf-f248-4f7a-8318-d947086f5f8c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.819987] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 889.819987] env[62503]: value = "task-1387853" [ 889.819987] env[62503]: _type = "Task" [ 889.819987] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.836900] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387853, 'name': Rename_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.000797] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.001406] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 890.004124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.774s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.005673] env[62503]: INFO nova.compute.claims [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.122937] env[62503]: DEBUG oslo_vmware.api [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387851, 'name': PowerOnVM_Task, 'duration_secs': 1.372597} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.123245] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.123462] env[62503]: INFO nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Took 10.16 seconds to spawn the instance on the hypervisor. [ 890.123649] env[62503]: DEBUG nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 890.124456] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1068f64f-180f-4d98-aa29-ea402d9aa13b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.219878] env[62503]: DEBUG nova.compute.manager [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Received event network-vif-plugged-269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 890.220393] env[62503]: DEBUG oslo_concurrency.lockutils [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] Acquiring lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.220646] env[62503]: DEBUG oslo_concurrency.lockutils [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.220827] env[62503]: DEBUG oslo_concurrency.lockutils [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.221015] env[62503]: DEBUG nova.compute.manager [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] No waiting events found dispatching network-vif-plugged-269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.221838] env[62503]: WARNING nova.compute.manager [req-870b6c1b-64f4-466c-be47-e73b284c45d0 req-8eaca646-0d2d-4ae1-8e1a-72d80737b23d service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Received 
unexpected event network-vif-plugged-269d53b9-4861-484d-bdac-553a7fafc310 for instance with vm_state building and task_state spawning. [ 890.238741] env[62503]: DEBUG oslo_concurrency.lockutils [req-744897e0-dc27-4837-a2fe-1de0aa123ec6 req-3b93b1c5-2a4b-4044-a58d-14ad42642b9d service nova] Releasing lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.239022] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.239196] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.334034] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387853, 'name': Rename_Task, 'duration_secs': 0.130113} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.334034] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.334034] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37ea4e29-eb2e-44f1-887a-0b4921430046 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.336480] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Successfully updated port: 269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.338597] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 890.338597] env[62503]: value = "task-1387854" [ 890.338597] env[62503]: _type = "Task" [ 890.338597] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.346462] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387854, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.510657] env[62503]: DEBUG nova.compute.utils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.521184] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 890.521443] env[62503]: DEBUG nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.612128] env[62503]: DEBUG nova.policy [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf0708bc29b048ed9d0085f853bd2088', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f28a2b1084de4f3eafb800bafb0ab68c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.651803] env[62503]: INFO nova.compute.manager [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Took 26.62 seconds to build instance. [ 890.788575] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.839859] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.840072] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.840256] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.852181] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387854, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.929534] env[62503]: DEBUG nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Successfully created port: ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.979283] env[62503]: DEBUG nova.network.neutron [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Updating instance_info_cache with network_info: [{"id": "02a00a1f-6723-41e6-b91a-5e79048551ce", "address": "fa:16:3e:16:b1:ef", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02a00a1f-67", "ovs_interfaceid": "02a00a1f-6723-41e6-b91a-5e79048551ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.016407] 
env[62503]: DEBUG nova.compute.manager [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Received event network-changed-71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 891.016690] env[62503]: DEBUG nova.compute.manager [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Refreshing instance network info cache due to event network-changed-71aa781d-4a40-4f00-8fb8-06cb4c73986a. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 891.017358] env[62503]: DEBUG oslo_concurrency.lockutils [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] Acquiring lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.017358] env[62503]: DEBUG oslo_concurrency.lockutils [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] Acquired lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.017358] env[62503]: DEBUG nova.network.neutron [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Refreshing network info cache for port 71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.022703] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 891.159937] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a01f7696-ade1-4d90-9188-a82a8c2369af tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.282s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.213016] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744c7d15-28b8-409d-9357-deafb6831f03 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.220546] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46341ea7-823b-47e0-b71a-1e33b8707638 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.252640] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9577b863-67eb-439e-b8fd-42cb5cbe9b05 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.260355] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa5bf9b-ec25-440d-9d33-3b59024bc796 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.273975] env[62503]: DEBUG nova.compute.provider_tree [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.352740] env[62503]: DEBUG oslo_vmware.api [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387854, 'name': PowerOnVM_Task, 'duration_secs': 0.761141} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.353070] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.353261] env[62503]: INFO nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Took 9.08 seconds to spawn the instance on the hypervisor. 
[ 891.353444] env[62503]: DEBUG nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 891.354222] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd51b0c7-cd7b-48ec-9dd1-86a953cf6866 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.451363] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.484248] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-ca1f1966-bfe1-495e-b055-f72150f72470" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.484581] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance network_info: |[{"id": "02a00a1f-6723-41e6-b91a-5e79048551ce", "address": "fa:16:3e:16:b1:ef", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02a00a1f-67", "ovs_interfaceid": "02a00a1f-6723-41e6-b91a-5e79048551ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 891.485206] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:b1:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02a00a1f-6723-41e6-b91a-5e79048551ce', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.493027] env[62503]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating folder: Project (44139c74b4b349af996a67f408a8441f). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.495072] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9709f6d0-14fc-4832-8922-b74c46c180ac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.504469] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created folder: Project (44139c74b4b349af996a67f408a8441f) in parent group-v294540. [ 891.504861] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating folder: Instances. Parent ref: group-v294592. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.505139] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e1532e7-5a34-4e20-80d3-3c01e7374b9f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.513579] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created folder: Instances in parent group-v294592. [ 891.513828] env[62503]: DEBUG oslo.service.loopingcall [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.514040] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.514253] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eeed65d-60e3-4bf6-90e8-f74ec048756a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.541420] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.541420] env[62503]: value = "task-1387857" [ 891.541420] env[62503]: _type = "Task" [ 891.541420] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.545487] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387857, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.751924] env[62503]: DEBUG nova.network.neutron [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updating instance_info_cache with network_info: [{"id": "269d53b9-4861-484d-bdac-553a7fafc310", "address": "fa:16:3e:02:be:62", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269d53b9-48", "ovs_interfaceid": "269d53b9-4861-484d-bdac-553a7fafc310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.779901] env[62503]: DEBUG nova.scheduler.client.report [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 891.873558] env[62503]: INFO nova.compute.manager [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Took 22.58 seconds to build instance. [ 891.900214] env[62503]: DEBUG nova.network.neutron [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updated VIF entry in instance network info cache for port 71aa781d-4a40-4f00-8fb8-06cb4c73986a. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.900697] env[62503]: DEBUG nova.network.neutron [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [{"id": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "address": "fa:16:3e:24:a7:1c", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71aa781d-4a", "ovs_interfaceid": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.035384] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 892.047801] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387857, 'name': CreateVM_Task, 'duration_secs': 0.361895} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.047801] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.048369] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.048557] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.048871] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.049162] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad70b1f2-d087-4221-a7a4-a466340985e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.055764] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 892.055764] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c2d71f-bd02-7f10-3d67-1fc80bbb7fd0" [ 892.055764] env[62503]: _type = "Task" [ 892.055764] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.061600] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.061825] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.061986] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.062280] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.062454] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.062713] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.062969] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.063131] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.063308] env[62503]: DEBUG 
nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.063597] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.063744] env[62503]: DEBUG nova.virt.hardware [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.064528] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10637ba2-ab5d-4e6d-a98e-cc732c543238 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.071996] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c2d71f-bd02-7f10-3d67-1fc80bbb7fd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.074971] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed38fc8b-47e8-4591-bd87-10620e0ac0c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.249973] env[62503]: DEBUG nova.compute.manager [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Received event network-changed-269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 892.250209] env[62503]: DEBUG nova.compute.manager [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Refreshing instance network info cache due to event network-changed-269d53b9-4861-484d-bdac-553a7fafc310. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 892.250469] env[62503]: DEBUG oslo_concurrency.lockutils [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] Acquiring lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.255576] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.255872] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Instance network_info: |[{"id": "269d53b9-4861-484d-bdac-553a7fafc310", "address": "fa:16:3e:02:be:62", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269d53b9-48", "ovs_interfaceid": "269d53b9-4861-484d-bdac-553a7fafc310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 892.256157] env[62503]: DEBUG oslo_concurrency.lockutils [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] Acquired lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.256334] env[62503]: DEBUG nova.network.neutron [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Refreshing network info cache for port 269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.257464] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:be:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'269d53b9-4861-484d-bdac-553a7fafc310', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.264697] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Creating folder: Project (eac2608fc52a497f961d018c888a826f). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.265751] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66b66e38-f0e2-4c11-898f-2c3d39033672 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.276656] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Created folder: Project (eac2608fc52a497f961d018c888a826f) in parent group-v294540. [ 892.276850] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Creating folder: Instances. Parent ref: group-v294595. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.277096] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1081692-2d51-425d-9457-f5b849c0a066 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.287987] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.287987] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 892.289497] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Created folder: Instances in parent group-v294595. [ 892.289722] env[62503]: DEBUG oslo.service.loopingcall [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.290162] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.535s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.291632] env[62503]: INFO nova.compute.claims [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.293828] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.295578] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53621976-95b6-464a-b0d3-7961feb94581 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.314700] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.314700] env[62503]: value = "task-1387860" [ 892.314700] env[62503]: _type = "Task" [ 892.314700] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.322857] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387860, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.375558] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d3b4e233-4266-4cfe-a227-3bff2b6257fb tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.097s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.404293] env[62503]: DEBUG oslo_concurrency.lockutils [req-f9c2cec4-bbb0-4eaa-88b8-53dfa554c2a7 req-b884ab0e-d7b2-4729-9a58-13e099f24016 service nova] Releasing lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.562017] env[62503]: DEBUG nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Successfully updated port: ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.569834] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c2d71f-bd02-7f10-3d67-1fc80bbb7fd0, 'name': SearchDatastore_Task, 'duration_secs': 0.028991} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.570484] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.571481] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.571481] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.571481] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.571481] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.571806] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bc3915f-ee32-4706-bf5e-567dd9a9830e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.581026] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.581026] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.581417] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85483e22-f1a4-4752-b75d-34c4015a6281 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.588163] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 892.588163] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0d659-15a0-de6f-1b16-67c13c51258c" [ 892.588163] env[62503]: _type = "Task" [ 892.588163] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.596117] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0d659-15a0-de6f-1b16-67c13c51258c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.680260] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.680659] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.680903] env[62503]: INFO nova.compute.manager [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Shelving [ 892.723401] env[62503]: DEBUG nova.compute.manager [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Stashing vm_state: active {{(pid=62503) _prep_resize /opt/stack/nova/nova/compute/manager.py:5920}} [ 892.792820] env[62503]: DEBUG nova.compute.utils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.794862] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 892.794862] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.825102] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387860, 'name': CreateVM_Task, 'duration_secs': 0.396179} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.825280] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.825959] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.826854] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.826854] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.826854] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff4e939-86e6-4d03-b547-4fdd303609fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.831439] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 892.831439] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522823f5-6071-6352-e311-9c48a1dd8a38" [ 892.831439] env[62503]: _type = "Task" [ 892.831439] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.839265] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522823f5-6071-6352-e311-9c48a1dd8a38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.850551] env[62503]: DEBUG nova.policy [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbe8eb64be214c2d856e3f4576edbf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac2608fc52a497f961d018c888a826f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 893.030129] env[62503]: DEBUG nova.network.neutron [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updated VIF entry in instance network info cache for port 269d53b9-4861-484d-bdac-553a7fafc310. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.030571] env[62503]: DEBUG nova.network.neutron [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updating instance_info_cache with network_info: [{"id": "269d53b9-4861-484d-bdac-553a7fafc310", "address": "fa:16:3e:02:be:62", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269d53b9-48", "ovs_interfaceid": "269d53b9-4861-484d-bdac-553a7fafc310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.071306] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.071663] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquired lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.071801] env[62503]: DEBUG 
nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.103362] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0d659-15a0-de6f-1b16-67c13c51258c, 'name': SearchDatastore_Task, 'duration_secs': 0.012617} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.103362] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267d7bc7-90b2-4f41-9d1f-5ec58e5e205f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.107059] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 893.107059] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e74152-6356-089e-c030-0672e676f0a6" [ 893.107059] env[62503]: _type = "Task" [ 893.107059] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.116564] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e74152-6356-089e-c030-0672e676f0a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.125806] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Successfully created port: e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.247331] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.301043] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 893.344712] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522823f5-6071-6352-e311-9c48a1dd8a38, 'name': SearchDatastore_Task, 'duration_secs': 0.01439} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.347455] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.347850] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.348221] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.348517] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.348875] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.349635] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe1c4629-9d0f-4165-8a3e-0896e18eab35 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.358970] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.359188] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.362161] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffc31840-5c39-488b-bcab-0567ec92a3ff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.367774] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 893.367774] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fabced-2bf2-9c71-c9eb-b735fc064708" [ 893.367774] env[62503]: _type = "Task" [ 893.367774] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.377177] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fabced-2bf2-9c71-c9eb-b735fc064708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.499091] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46732be4-13b3-492b-b5cd-c5d19abca29c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.507236] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a866462-af9b-45a5-b328-dc6dc1e6e038 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.536843] env[62503]: DEBUG oslo_concurrency.lockutils [req-6a0b7632-8fa8-42b7-9370-0c4357dd35ed req-950fb439-ef41-4b50-8eb6-19580e341105 service nova] Releasing lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.537951] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708964a8-4a13-44e7-ad95-bb76ec3e7f96 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.545768] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620d2955-a0ec-4514-a001-2f99e2daf61e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.559246] env[62503]: DEBUG nova.compute.provider_tree [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.607276] env[62503]: DEBUG nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.618894] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e74152-6356-089e-c030-0672e676f0a6, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.619177] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.619444] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ca1f1966-bfe1-495e-b055-f72150f72470/ca1f1966-bfe1-495e-b055-f72150f72470.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.619707] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfc71029-48e0-4ec1-941a-6609687da604 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.626930] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 893.626930] env[62503]: value = "task-1387861" [ 893.626930] env[62503]: _type = "Task" [ 893.626930] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.635863] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.690500] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.690901] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be7eee52-c413-46e7-878b-dee93762e129 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.698062] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 893.698062] env[62503]: value = "task-1387862" [ 893.698062] env[62503]: _type = "Task" [ 893.698062] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.707501] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387862, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.809421] env[62503]: DEBUG nova.network.neutron [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updating instance_info_cache with network_info: [{"id": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "address": "fa:16:3e:3b:38:32", "network": {"id": "082037b2-ad22-4d1a-84b7-a53daf45a565", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1411425723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f28a2b1084de4f3eafb800bafb0ab68c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad311052-6c", "ovs_interfaceid": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.886803] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fabced-2bf2-9c71-c9eb-b735fc064708, 'name': SearchDatastore_Task, 'duration_secs': 0.010264} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.888404] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233a9009-1d8b-41c2-87aa-75160c48565b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.895824] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 893.895824] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520c24f2-b53c-28b3-8d09-0e7627240975" [ 893.895824] env[62503]: _type = "Task" [ 893.895824] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.909373] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520c24f2-b53c-28b3-8d09-0e7627240975, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.062626] env[62503]: DEBUG nova.scheduler.client.report [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 894.141061] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387861, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.210819] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387862, 'name': PowerOffVM_Task, 'duration_secs': 0.185696} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.211136] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 894.212080] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4dd6b5-486e-4cbb-8212-6a90cfad1587 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.231223] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b0ec6c-daf4-4b55-859d-e25b58f0e60c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.284695] env[62503]: DEBUG nova.compute.manager [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Received event network-vif-plugged-ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 894.285035] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Acquiring lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.285349] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.285547] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.285755] env[62503]: DEBUG nova.compute.manager [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] No waiting events found dispatching network-vif-plugged-ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.285945] env[62503]: WARNING nova.compute.manager [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Received unexpected event network-vif-plugged-ad311052-6c0b-4b0a-ad92-0d1198dbad4c for instance with vm_state building and task_state spawning. 
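The repeated 'Acquiring lock "...-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event"' / 'acquired ... waited 0.000s' / '"released" ... held 0.000s' triplets in the records above are the DEBUG output of oslo.concurrency's synchronized() wrapper (lockutils.py:402/407/421) around Nova's per-instance event bookkeeping; the ".." in the holder name looks like a stripped Python "<locals>" qualname marker. A minimal sketch of that pattern, assuming an illustrative lock name and a placeholder body rather than Nova's actual _pop_event implementation:

```python
from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name):
    # Sketch only: the lock name and the body are illustrative placeholders.
    @lockutils.synchronized(f"{instance_uuid}-events")
    def _pop_event():
        # synchronized() logs the "Acquiring lock ... by ...", "acquired ...
        # waited Ns" and '"released" ... held Ns' DEBUG lines seen above
        # around this critical section.
        return None  # would pop any waiter registered for event_name here

    waiter = _pop_event()
    if waiter is None:
        # With no registered waiter, the caller logs "No waiting events found
        # dispatching ..." and, for an event it did not expect, the WARNING
        # recorded just above.
        return None
    return waiter
```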
[ 894.286126] env[62503]: DEBUG nova.compute.manager [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Received event network-changed-ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 894.286289] env[62503]: DEBUG nova.compute.manager [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Refreshing instance network info cache due to event network-changed-ad311052-6c0b-4b0a-ad92-0d1198dbad4c. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 894.286466] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Acquiring lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.309529] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 894.311957] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Releasing lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.312261] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Instance network_info: |[{"id": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "address": "fa:16:3e:3b:38:32", "network": {"id": "082037b2-ad22-4d1a-84b7-a53daf45a565", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1411425723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f28a2b1084de4f3eafb800bafb0ab68c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad311052-6c", "ovs_interfaceid": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 894.312547] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Acquired lock 
"refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.312800] env[62503]: DEBUG nova.network.neutron [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Refreshing network info cache for port ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.314411] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:38:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad311052-6c0b-4b0a-ad92-0d1198dbad4c', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.323342] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Creating folder: Project (f28a2b1084de4f3eafb800bafb0ab68c). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.325017] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b7395dc-1608-4eeb-b349-d673e82f3259 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.336088] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Created folder: Project (f28a2b1084de4f3eafb800bafb0ab68c) in parent group-v294540. [ 894.336409] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Creating folder: Instances. Parent ref: group-v294598. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.336690] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78253835-71a1-4196-90da-9a49ae641aa4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.341362] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.341675] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.341876] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.342097] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.342257] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.342413] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.342631] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.342877] env[62503]: DEBUG nova.virt.hardware [None 
req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.343072] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.343247] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.343426] env[62503]: DEBUG nova.virt.hardware [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.344553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42857154-520f-4160-84be-0e7064e0694f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.352519] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dacc2b-4f3a-42c8-a988-6708497e2af3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.358193] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Created folder: Instances in parent group-v294598. [ 894.358476] env[62503]: DEBUG oslo.service.loopingcall [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.359060] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.359313] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ad00115-8ba1-4bbf-9b4f-ecb85241b568 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.389824] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.389824] env[62503]: value = "task-1387865" [ 894.389824] env[62503]: _type = "Task" [ 894.389824] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.397848] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387865, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.407028] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520c24f2-b53c-28b3-8d09-0e7627240975, 'name': SearchDatastore_Task, 'duration_secs': 0.01762} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.407313] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.407579] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 629054bb-8fdb-45a2-8c07-216c4104d4a6/629054bb-8fdb-45a2-8c07-216c4104d4a6.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.407849] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a568a05c-b26b-4a52-bd05-ff17a75a20fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.414131] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 894.414131] env[62503]: value = "task-1387866" [ 894.414131] env[62503]: _type = "Task" [ 894.414131] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.424288] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387866, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.527576] env[62503]: DEBUG nova.compute.manager [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Received event network-vif-plugged-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 894.527576] env[62503]: DEBUG oslo_concurrency.lockutils [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] Acquiring lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.527576] env[62503]: DEBUG oslo_concurrency.lockutils [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.527769] env[62503]: DEBUG oslo_concurrency.lockutils [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.528048] env[62503]: DEBUG nova.compute.manager [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] No waiting events found dispatching network-vif-plugged-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.528367] env[62503]: WARNING nova.compute.manager [req-c6e4f90e-5ae4-4ec2-b7db-062cee271dd8 req-e93d0651-2938-4525-a3b6-5f8f26f54910 service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Received unexpected event network-vif-plugged-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e for instance with vm_state building and task_state spawning. [ 894.569575] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.569575] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 894.574819] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.651s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.578522] env[62503]: INFO nova.compute.claims [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.640573] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631523} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.640862] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ca1f1966-bfe1-495e-b055-f72150f72470/ca1f1966-bfe1-495e-b055-f72150f72470.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.641335] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.641420] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aeb76e1d-c574-4ce3-bf0f-ffeeb64e8c26 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.649462] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 894.649462] env[62503]: value = "task-1387867" [ 894.649462] env[62503]: _type = "Task" [ 894.649462] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.658199] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387867, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.690731] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Successfully updated port: e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.743260] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Creating Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 894.743716] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c4e403e7-3a0a-49e1-bb86-a9e9df21d1dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.751281] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 894.751281] env[62503]: value = "task-1387868" [ 894.751281] env[62503]: _type = "Task" [ 894.751281] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.759556] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387868, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.900936] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387865, 'name': CreateVM_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.927628] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387866, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.074080] env[62503]: DEBUG nova.network.neutron [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updated VIF entry in instance network info cache for port ad311052-6c0b-4b0a-ad92-0d1198dbad4c. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.074248] env[62503]: DEBUG nova.network.neutron [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updating instance_info_cache with network_info: [{"id": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "address": "fa:16:3e:3b:38:32", "network": {"id": "082037b2-ad22-4d1a-84b7-a53daf45a565", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1411425723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f28a2b1084de4f3eafb800bafb0ab68c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad311052-6c", "ovs_interfaceid": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.085039] env[62503]: DEBUG nova.compute.utils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 895.089109] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 895.089109] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.138141] env[62503]: DEBUG nova.policy [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '691809da402d4a29b085cfe3b22306b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a833cd3315d0487cb3badd7b0d330a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 895.163673] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194177} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.164105] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.165065] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086c0a6e-8399-4f80-affa-e519217e862f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.193475] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] ca1f1966-bfe1-495e-b055-f72150f72470/ca1f1966-bfe1-495e-b055-f72150f72470.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.194369] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.194616] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.194943] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 
tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.196321] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d683657-745d-489e-9bdb-868b097617ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.219593] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 895.219593] env[62503]: value = "task-1387869" [ 895.219593] env[62503]: _type = "Task" [ 895.219593] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.229861] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387869, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.243222] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.260360] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387868, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.390837] env[62503]: DEBUG nova.network.neutron [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Updating instance_info_cache with network_info: [{"id": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "address": "fa:16:3e:a7:71:7c", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape75a1fc0-2c", "ovs_interfaceid": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.403568] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387865, 'name': CreateVM_Task, 'duration_secs': 0.844276} completed successfully. 
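The instance_info_cache update above carries the full network_info structure for port e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e. A small sketch using plain dicts, not Nova's NetworkInfo/VIF model classes, showing how the MAC address and fixed IPs can be read out of an entry shaped like that record:

    # trimmed-down copy of the structure logged above
    network_info = [{
        "id": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e",
        "address": "fa:16:3e:a7:71:7c",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.3", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]
                     if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed_ips)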
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.403744] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.404661] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.404661] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.404946] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.405217] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a449a8b2-2df7-475a-a876-1bb83059d0c1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.410698] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 895.410698] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a940c9-1c13-c4d2-262c-fe7aa7a27a83" [ 895.410698] env[62503]: _type = "Task" [ 895.410698] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.421748] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a940c9-1c13-c4d2-262c-fe7aa7a27a83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.427313] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817903} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.427585] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 629054bb-8fdb-45a2-8c07-216c4104d4a6/629054bb-8fdb-45a2-8c07-216c4104d4a6.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.427804] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.428062] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7c03a80-892b-44c7-a2c5-984e61168299 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.436604] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 895.436604] env[62503]: value = "task-1387870" [ 895.436604] env[62503]: _type = "Task" [ 895.436604] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.443115] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387870, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.443926] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Successfully created port: 305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.577078] env[62503]: DEBUG oslo_concurrency.lockutils [req-d134eb1b-f4a4-490d-a1b2-cfb2b0965d6e req-4936343a-6c12-4c4f-9750-600b306fece5 service nova] Releasing lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.593932] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 895.731401] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387869, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.761667] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387868, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.790717] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274804e2-971f-42db-8b44-f06a509abc89 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.798054] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22da94d6-741d-4b11-95c4-b7e634caf01d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.829199] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5c5649-e561-4a98-bbbe-83eebaa29cd9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.837097] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a51e3a-835f-46e4-bcef-50e10591d42e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.851783] env[62503]: DEBUG nova.compute.provider_tree [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.898595] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.898920] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Instance network_info: |[{"id": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "address": "fa:16:3e:a7:71:7c", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tape75a1fc0-2c", "ovs_interfaceid": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 895.899365] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:71:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.906918] env[62503]: DEBUG oslo.service.loopingcall [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.907146] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.907370] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79459a44-55d1-41e8-916b-00fd028f6a8d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.930057] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a940c9-1c13-c4d2-262c-fe7aa7a27a83, 'name': SearchDatastore_Task, 'duration_secs': 0.033969} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.931399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.931621] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.931872] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.932036] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.932226] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.932468] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.932468] env[62503]: value = "task-1387871" [ 895.932468] env[62503]: _type = "Task" [ 895.932468] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.932631] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0acbbae1-8ca0-48ab-9230-1d1202c35203 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.944463] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387871, 'name': CreateVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.947979] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06445} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.948246] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.948473] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.949279] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.949566] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90529f1c-8365-424b-9598-a68ba94886aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.952300] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea3ae02-f8ab-4ff0-bd55-0fa28707fe88 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.959216] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 895.959216] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e64a05-2b94-1af2-4b65-3d343c0e90ef" [ 895.959216] env[62503]: _type = "Task" [ 895.959216] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.977542] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 629054bb-8fdb-45a2-8c07-216c4104d4a6/629054bb-8fdb-45a2-8c07-216c4104d4a6.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.980914] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59f21db3-4690-4d4b-9c97-1e6831f0656e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.002644] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e64a05-2b94-1af2-4b65-3d343c0e90ef, 'name': SearchDatastore_Task, 'duration_secs': 0.016878} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.004898] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 896.004898] env[62503]: value = "task-1387872" [ 896.004898] env[62503]: _type = "Task" [ 896.004898] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.005124] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccd68d43-7639-4951-ad5a-aaca4a492a58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.014223] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 896.014223] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52adb6ef-3357-4d9c-43c7-3377c01366e8" [ 896.014223] env[62503]: _type = "Task" [ 896.014223] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.018156] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387872, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.025928] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52adb6ef-3357-4d9c-43c7-3377c01366e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.232884] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387869, 'name': ReconfigVM_Task, 'duration_secs': 0.896428} completed successfully. 
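The CreateVM_Task, CopyVirtualDisk_Task and ReconfigVM_Task records follow the same wait-and-poll pattern: log the task handle, poll its progress, and report "completed successfully" with a duration. A toy sketch of that pattern, not oslo.vmware's actual wait_for_task implementation (which drives the same idea through a looping call against the vCenter API):

    import time

    def wait_for_task(poll, interval=0.5, timeout=300):
        # poll() returns (state, progress), e.g. ("running", 25) or ("success", 100)
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # toy usage: a "task" that finishes on the third poll
    states = iter([("running", 0), ("running", 25), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0)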
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.233228] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Reconfigured VM instance instance-0000004b to attach disk [datastore2] ca1f1966-bfe1-495e-b055-f72150f72470/ca1f1966-bfe1-495e-b055-f72150f72470.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.233877] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f698f18d-db75-4372-a975-dfa7f452372d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.240114] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 896.240114] env[62503]: value = "task-1387873" [ 896.240114] env[62503]: _type = "Task" [ 896.240114] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.247933] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387873, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.261214] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387868, 'name': CreateSnapshot_Task, 'duration_secs': 1.262028} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.261506] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Created Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.262271] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a25826-f2e2-4567-b7dc-7760f4b4b636 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.354836] env[62503]: DEBUG nova.scheduler.client.report [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 896.445174] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387871, 'name': CreateVM_Task, 'duration_secs': 0.471125} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.445340] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.446135] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.446318] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.446636] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.446887] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a687bb-ec91-428c-8e91-4e57fbb5cad3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.451595] env[62503]: DEBUG oslo_vmware.api [None 
req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 896.451595] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525e7550-98f8-2678-35a0-821e22eb732c" [ 896.451595] env[62503]: _type = "Task" [ 896.451595] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.459976] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525e7550-98f8-2678-35a0-821e22eb732c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.516837] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.527014] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52adb6ef-3357-4d9c-43c7-3377c01366e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015255} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.527294] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.527560] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c39e7ee3-1b97-44ec-92d6-733976c0f0f8/c39e7ee3-1b97-44ec-92d6-733976c0f0f8.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.527819] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66f2a4ea-5255-4f9d-8e3a-d5aed2b9c2d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.534041] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 896.534041] env[62503]: value = "task-1387874" [ 896.534041] env[62503]: _type = "Task" [ 896.534041] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.540731] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.552572] env[62503]: DEBUG nova.compute.manager [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Received event network-changed-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 896.552784] env[62503]: DEBUG nova.compute.manager [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Refreshing instance network info cache due to event network-changed-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 896.552975] env[62503]: DEBUG oslo_concurrency.lockutils [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] Acquiring lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.553138] env[62503]: DEBUG oslo_concurrency.lockutils [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] Acquired lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.553302] env[62503]: DEBUG nova.network.neutron [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Refreshing network info cache for port e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.606117] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 896.632470] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b6e0a6bd781a8b51a2af924c35a3e226',container_format='bare',created_at=2024-10-31T11:33:32Z,direct_url=,disk_format='vmdk',id=3306da0e-1d43-4f11-be11-5fe7cf1194eb,min_disk=1,min_ram=0,name='tempest-test-snap-1162704797',owner='a833cd3315d0487cb3badd7b0d330a9a',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-31T11:33:46Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.632778] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.632972] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.633187] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.633345] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.633497] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.633714] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.633876] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.634057] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Got 1 possible topologies {{(pid=62503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.634228] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.634404] env[62503]: DEBUG nova.virt.hardware [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.635278] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce61fd5-ec74-45a4-9059-fe7e374da1fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.643054] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fe4030-2bce-45c9-bfa4-cf48ba70d854 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.752805] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387873, 'name': Rename_Task, 'duration_secs': 0.161469} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.753156] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.753441] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-badb8efe-a0c7-4c3f-a52f-220004d671c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.760861] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 896.760861] env[62503]: value = "task-1387875" [ 896.760861] env[62503]: _type = "Task" [ 896.760861] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.770327] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387875, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.780875] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Creating linked-clone VM from snapshot {{(pid=62503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 896.781205] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-326de8da-9ea6-4a02-a78b-a5f7a4a56266 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.789138] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 896.789138] env[62503]: value = "task-1387876" [ 896.789138] env[62503]: _type = "Task" [ 896.789138] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.798958] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387876, 'name': CloneVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.860881] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.861496] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 896.864645] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.056s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.864918] env[62503]: DEBUG nova.objects.instance [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lazy-loading 'resources' on Instance uuid cf611345-d276-4745-a2f8-0551c9dca2c2 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.965580] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525e7550-98f8-2678-35a0-821e22eb732c, 'name': SearchDatastore_Task, 'duration_secs': 0.010217} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.966641] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.966641] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.966641] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.966641] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.966935] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.967244] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-981fd07c-b85b-432c-8816-14fbd9257418 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.985399] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Successfully updated port: 305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.997026] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.997396] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.998512] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48752767-54e6-4d4b-898f-63964a58cd13 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.005481] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 897.005481] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525a2cd5-07c8-0b27-2a20-9cd76e46a99e" [ 897.005481] env[62503]: _type = "Task" [ 897.005481] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.018433] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525a2cd5-07c8-0b27-2a20-9cd76e46a99e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.021751] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387872, 'name': ReconfigVM_Task, 'duration_secs': 0.560184} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.022039] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 629054bb-8fdb-45a2-8c07-216c4104d4a6/629054bb-8fdb-45a2-8c07-216c4104d4a6.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.022725] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fc7bdd3-2007-41a4-822c-ea06ab5004d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.030364] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 897.030364] env[62503]: value = "task-1387877" [ 897.030364] env[62503]: _type = "Task" [ 897.030364] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.043881] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387877, 'name': Rename_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.047158] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49922} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.047424] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c39e7ee3-1b97-44ec-92d6-733976c0f0f8/c39e7ee3-1b97-44ec-92d6-733976c0f0f8.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.047733] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.048029] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd68d234-89be-4358-b8c3-2461bd761391 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.057189] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 897.057189] env[62503]: value = "task-1387878" [ 897.057189] env[62503]: _type = "Task" [ 897.057189] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.068321] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.274322] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387875, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.275227] env[62503]: DEBUG nova.network.neutron [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Updated VIF entry in instance network info cache for port e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.275659] env[62503]: DEBUG nova.network.neutron [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Updating instance_info_cache with network_info: [{"id": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "address": "fa:16:3e:a7:71:7c", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape75a1fc0-2c", "ovs_interfaceid": "e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.300017] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387876, 'name': CloneVM_Task} progress is 94%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.368330] env[62503]: DEBUG nova.compute.utils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.373293] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 897.373491] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.414014] env[62503]: DEBUG nova.policy [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbe8eb64be214c2d856e3f4576edbf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eac2608fc52a497f961d018c888a826f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.488554] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.488926] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.489228] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.519745] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525a2cd5-07c8-0b27-2a20-9cd76e46a99e, 'name': SearchDatastore_Task, 'duration_secs': 0.012648} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.520974] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a61844b9-e076-4c9b-809a-d83289349282 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.527340] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 897.527340] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52af7d8a-7ad3-25a2-2e0f-7d8ea5b62819" [ 897.527340] env[62503]: _type = "Task" [ 897.527340] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.538804] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52af7d8a-7ad3-25a2-2e0f-7d8ea5b62819, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.544518] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387877, 'name': Rename_Task, 'duration_secs': 0.29417} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.545099] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.545366] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71a4cf16-b512-4cd0-b052-13beef8d0041 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.554444] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 897.554444] env[62503]: value = "task-1387879" [ 897.554444] env[62503]: _type = "Task" [ 897.554444] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.568735] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387879, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.568987] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119569} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.569242] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.570095] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a20dca-4e71-488e-8cb6-29cb17f8ef98 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.594587] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] c39e7ee3-1b97-44ec-92d6-733976c0f0f8/c39e7ee3-1b97-44ec-92d6-733976c0f0f8.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.595912] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8836a6f6-71c9-47f9-9fe4-38327d091eec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.610705] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82b39ff-c639-4eab-9ae2-fc4922b78e58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.619259] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b8e435-184e-4745-86b7-245d88651c1a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.623431] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 897.623431] env[62503]: value = "task-1387880" [ 897.623431] env[62503]: _type = "Task" [ 897.623431] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.655022] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6805c7-8914-44ec-970c-a4bfc21e71bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.657212] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387880, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.663189] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7963e1f0-21a5-4b35-ac93-5bcb6de4d3de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.677160] env[62503]: DEBUG nova.compute.provider_tree [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.738568] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Successfully created port: fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.772397] env[62503]: DEBUG oslo_vmware.api [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387875, 'name': PowerOnVM_Task, 'duration_secs': 0.599703} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.772693] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.772936] env[62503]: INFO nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Took 10.31 seconds to spawn the instance on the hypervisor. [ 897.773154] env[62503]: DEBUG nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 897.773999] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f88785-25e3-445f-a450-32d84d25d8cd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.779284] env[62503]: DEBUG oslo_concurrency.lockutils [req-8a3b026e-ec4d-43ac-907e-755b85e25e01 req-bad7c873-44e3-4b14-8b4c-8bd6aad4546b service nova] Releasing lock "refresh_cache-7b8c670d-3f2a-431d-91da-4ced781e6e51" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.803422] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387876, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.878947] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 898.035717] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.043054] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52af7d8a-7ad3-25a2-2e0f-7d8ea5b62819, 'name': SearchDatastore_Task, 'duration_secs': 0.038322} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.043360] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.043603] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7b8c670d-3f2a-431d-91da-4ced781e6e51/7b8c670d-3f2a-431d-91da-4ced781e6e51.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.043869] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6cfd03d-da22-41c7-bd9c-9a06ccee49ea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.052703] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 898.052703] env[62503]: value = "task-1387881" [ 898.052703] env[62503]: _type = "Task" [ 898.052703] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.069435] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387881, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.073816] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387879, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.135411] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.180668] env[62503]: DEBUG nova.scheduler.client.report [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 898.284990] env[62503]: DEBUG nova.network.neutron [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Updating instance_info_cache with network_info: [{"id": "305fe606-9e39-4466-9423-e9168de3ec21", "address": "fa:16:3e:f2:4d:1e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap305fe606-9e", "ovs_interfaceid": "305fe606-9e39-4466-9423-e9168de3ec21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.303268] env[62503]: INFO nova.compute.manager [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Took 17.37 seconds to build instance. 
[ 898.311294] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387876, 'name': CloneVM_Task} progress is 95%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.565743] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387881, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.571543] env[62503]: DEBUG oslo_vmware.api [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387879, 'name': PowerOnVM_Task, 'duration_secs': 0.787675} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.571831] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.572058] env[62503]: INFO nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Took 8.81 seconds to spawn the instance on the hypervisor. 
[ 898.572251] env[62503]: DEBUG nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 898.573122] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7811a304-b807-4b0d-b508-a938618c4637 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.584608] env[62503]: DEBUG nova.compute.manager [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Received event network-vif-plugged-305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 898.584846] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Acquiring lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.585181] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.585255] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.585420] env[62503]: DEBUG nova.compute.manager [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] No waiting events found dispatching network-vif-plugged-305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 898.585590] env[62503]: WARNING nova.compute.manager [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Received unexpected event network-vif-plugged-305fe606-9e39-4466-9423-e9168de3ec21 for instance with vm_state building and task_state spawning. 
[ 898.585764] env[62503]: DEBUG nova.compute.manager [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Received event network-changed-305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 898.585921] env[62503]: DEBUG nova.compute.manager [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Refreshing instance network info cache due to event network-changed-305fe606-9e39-4466-9423-e9168de3ec21. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 898.586102] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Acquiring lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.635165] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387880, 'name': ReconfigVM_Task, 'duration_secs': 0.658863} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.635464] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Reconfigured VM instance instance-0000004d to attach disk [datastore1] c39e7ee3-1b97-44ec-92d6-733976c0f0f8/c39e7ee3-1b97-44ec-92d6-733976c0f0f8.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.636159] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed3d6946-08e3-479f-9aca-8132e9625d37 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.642796] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 898.642796] env[62503]: value = "task-1387882" [ 898.642796] env[62503]: _type = "Task" [ 898.642796] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.651490] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387882, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.687083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.822s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.689423] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.442s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.714856] env[62503]: INFO nova.scheduler.client.report [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Deleted allocations for instance cf611345-d276-4745-a2f8-0551c9dca2c2 [ 898.787596] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.788750] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Instance network_info: |[{"id": "305fe606-9e39-4466-9423-e9168de3ec21", "address": "fa:16:3e:f2:4d:1e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap305fe606-9e", "ovs_interfaceid": "305fe606-9e39-4466-9423-e9168de3ec21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 898.788750] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Acquired lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.788750] env[62503]: DEBUG nova.network.neutron [req-9f08f47b-3e5e-4443-832a-6126fc596aac 
req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Refreshing network info cache for port 305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.789583] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:4d:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec3f9e71-839a-429d-b211-d3dfc98ca4f6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '305fe606-9e39-4466-9423-e9168de3ec21', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.797313] env[62503]: DEBUG oslo.service.loopingcall [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.800280] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.804159] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33d92846-98c7-41cf-9d14-0a0e6f3215e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.819072] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bc583026-98f6-4899-b166-facb931d7a18 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.891s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.825443] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387876, 'name': CloneVM_Task, 'duration_secs': 1.726505} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.826706] env[62503]: INFO nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Created linked-clone VM from snapshot [ 898.826948] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.826948] env[62503]: value = "task-1387883" [ 898.826948] env[62503]: _type = "Task" [ 898.826948] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.827909] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9aa5e5-4bb7-413a-abf5-91d52b8079f1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.838390] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Uploading image 7cbf51d8-1806-40f1-8e00-1394fe0bcfc8 {{(pid=62503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 898.843103] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387883, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.864144] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 898.864144] env[62503]: value = "vm-294603" [ 898.864144] env[62503]: _type = "VirtualMachine" [ 898.864144] env[62503]: }. {{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 898.864430] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ef1ca57a-20c7-4d25-ab69-4c45e922ecab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.870975] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lease: (returnval){ [ 898.870975] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d481ff-dc14-133a-958a-d7e02ae1b69f" [ 898.870975] env[62503]: _type = "HttpNfcLease" [ 898.870975] env[62503]: } obtained for exporting VM: (result){ [ 898.870975] env[62503]: value = "vm-294603" [ 898.870975] env[62503]: _type = "VirtualMachine" [ 898.870975] env[62503]: }. {{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 898.871285] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the lease: (returnval){ [ 898.871285] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d481ff-dc14-133a-958a-d7e02ae1b69f" [ 898.871285] env[62503]: _type = "HttpNfcLease" [ 898.871285] env[62503]: } to be ready. {{(pid=62503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 898.881324] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 898.881324] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d481ff-dc14-133a-958a-d7e02ae1b69f" [ 898.881324] env[62503]: _type = "HttpNfcLease" [ 898.881324] env[62503]: } is initializing. 
{{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 898.886746] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 898.911501] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.911780] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.911977] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.912205] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.912387] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.912575] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.912832] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.913142] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.913396] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.913604] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.913820] env[62503]: DEBUG nova.virt.hardware [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.915074] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8301881-b21a-4af2-bb30-e9460cfb96bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.925770] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91940216-c8d9-4f16-847b-0a783c6f3626 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.064411] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387881, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.620149} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.064739] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7b8c670d-3f2a-431d-91da-4ced781e6e51/7b8c670d-3f2a-431d-91da-4ced781e6e51.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.065013] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.065307] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba8c0bb4-f590-4c16-b976-fc84aa6ed98e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.071719] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 899.071719] env[62503]: value = "task-1387885" [ 899.071719] env[62503]: _type = "Task" [ 899.071719] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.072575] env[62503]: DEBUG nova.network.neutron [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Updated VIF entry in instance network info cache for port 305fe606-9e39-4466-9423-e9168de3ec21. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.072979] env[62503]: DEBUG nova.network.neutron [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Updating instance_info_cache with network_info: [{"id": "305fe606-9e39-4466-9423-e9168de3ec21", "address": "fa:16:3e:f2:4d:1e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap305fe606-9e", "ovs_interfaceid": "305fe606-9e39-4466-9423-e9168de3ec21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.082157] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387885, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.096589] env[62503]: INFO nova.compute.manager [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Took 16.14 seconds to build instance. [ 899.153130] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387882, 'name': Rename_Task, 'duration_secs': 0.30885} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.153729] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.154075] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f6f6856-091e-4858-bb4c-db4eea1e37a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.160832] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 899.160832] env[62503]: value = "task-1387886" [ 899.160832] env[62503]: _type = "Task" [ 899.160832] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.169187] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.195737] env[62503]: INFO nova.compute.claims [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.221538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4fdc7626-9734-4113-a7b4-a544a2ca22f6 tempest-SecurityGroupsTestJSON-1927296854 tempest-SecurityGroupsTestJSON-1927296854-project-member] Lock "cf611345-d276-4745-a2f8-0551c9dca2c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.355s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.286202] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Successfully updated port: fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.340124] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387883, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.379204] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "ca1f1966-bfe1-495e-b055-f72150f72470" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.379478] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.379999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.379999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.380279] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.381651] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.381651] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d481ff-dc14-133a-958a-d7e02ae1b69f" [ 899.381651] env[62503]: _type = "HttpNfcLease" [ 899.381651] env[62503]: } is ready. {{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 899.382100] env[62503]: INFO nova.compute.manager [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Terminating instance [ 899.383488] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 899.383488] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d481ff-dc14-133a-958a-d7e02ae1b69f" [ 899.383488] env[62503]: _type = "HttpNfcLease" [ 899.383488] env[62503]: }. 
{{(pid=62503) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 899.384156] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d319bcca-f5be-433a-be3e-d2188e458302 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.386870] env[62503]: DEBUG nova.compute.manager [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 899.387073] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.387810] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cb53ae-5c36-497b-a601-90bff7d30934 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.395350] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk from lease info. {{(pid=62503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 899.395350] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk for reading. {{(pid=62503) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 899.398589] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.399549] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e550d2e4-e6c8-40bd-a48e-26f5088fb673 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.460850] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 899.460850] env[62503]: value = "task-1387887" [ 899.460850] env[62503]: _type = "Task" [ 899.460850] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.469714] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387887, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.530725] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8eed6d56-6cb4-4443-be92-1514000773fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.579035] env[62503]: DEBUG oslo_concurrency.lockutils [req-9f08f47b-3e5e-4443-832a-6126fc596aac req-49840707-9170-4ae1-ab3d-cde42954b989 service nova] Releasing lock "refresh_cache-c09488ed-e354-4abf-8999-b2f8afec44fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.584938] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101474} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.585240] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.586049] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bb0957-ebf2-47df-a9dd-952a8153a77f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.607901] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 7b8c670d-3f2a-431d-91da-4ced781e6e51/7b8c670d-3f2a-431d-91da-4ced781e6e51.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.608379] env[62503]: DEBUG oslo_concurrency.lockutils [None req-409bdb25-8a95-4fcd-b277-69a03419ff2a tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.661s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.608624] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3b06793-fea6-4ad9-ac06-699c8903b82f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.627544] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 899.627544] env[62503]: value = "task-1387888" [ 899.627544] env[62503]: _type = "Task" [ 899.627544] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.635699] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387888, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.671249] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387886, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.703376] env[62503]: INFO nova.compute.resource_tracker [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating resource usage from migration be380929-2664-4c9b-ad25-ae71398e93d8 [ 899.790028] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.790179] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.790310] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.845592] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387883, 'name': CreateVM_Task, 'duration_secs': 0.760789} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.846538] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.846701] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.846932] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.847655] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.847925] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffdca673-d1df-4af9-b048-b251180adf79 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.855034] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 899.855034] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d33ccf-17a9-cfe7-30ec-ab9335e51cf3" [ 899.855034] env[62503]: _type = "Task" [ 899.855034] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.863997] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d33ccf-17a9-cfe7-30ec-ab9335e51cf3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.893726] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac764a6-227f-4693-a769-df5f6628b05e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.903041] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92c7642-c798-4cbd-815f-5b064582b7dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.937768] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f09ceeb-e84d-4cb3-957a-0798c278d7a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.945903] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b218bfc0-23e4-4e0c-bea2-959c7941d510 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.967162] env[62503]: DEBUG nova.compute.provider_tree [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.980220] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387887, 'name': PowerOffVM_Task, 'duration_secs': 0.224036} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.980220] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.980220] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.980220] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86567949-7afc-46d0-83b4-f54b2fef5dca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.070802] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.070802] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.070802] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore2] ca1f1966-bfe1-495e-b055-f72150f72470 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.071033] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-131d6c72-ffb6-46c8-b402-1dc782ebc267 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.078566] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 900.078566] env[62503]: value = "task-1387890" [ 900.078566] env[62503]: _type = "Task" [ 900.078566] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.086744] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387890, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.137099] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387888, 'name': ReconfigVM_Task, 'duration_secs': 0.383074} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.137609] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 7b8c670d-3f2a-431d-91da-4ced781e6e51/7b8c670d-3f2a-431d-91da-4ced781e6e51.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.138352] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24cdf359-568a-4b3d-8288-6dc4c48cc401 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.145674] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 900.145674] env[62503]: value = "task-1387891" [ 900.145674] env[62503]: _type = "Task" [ 900.145674] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.153733] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387891, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.171730] env[62503]: DEBUG oslo_vmware.api [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387886, 'name': PowerOnVM_Task, 'duration_secs': 0.720917} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.172079] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.172313] env[62503]: INFO nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Took 8.14 seconds to spawn the instance on the hypervisor. 
[ 900.172523] env[62503]: DEBUG nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 900.173418] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07956410-9b9f-427b-9685-bf36370d1c9f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.338979] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.370550] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.370848] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Processing image 3306da0e-1d43-4f11-be11-5fe7cf1194eb {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.371113] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.371268] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.371471] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.371764] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89de8d34-6dbb-4c6b-9f95-d81a8fe2b078 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.380314] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.380546] env[62503]: DEBUG 
nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.381313] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a079b27-f527-4778-99f1-dbf6ddbdfd6c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.386835] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 900.386835] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52bf31e8-6f8d-fc42-0105-d6f64a3023db" [ 900.386835] env[62503]: _type = "Task" [ 900.386835] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.394727] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52bf31e8-6f8d-fc42-0105-d6f64a3023db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.475230] env[62503]: DEBUG nova.scheduler.client.report [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 900.540138] env[62503]: DEBUG nova.network.neutron [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Updating instance_info_cache with network_info: [{"id": "fea5a48c-97b0-4aa5-b402-a421e0392f74", "address": "fa:16:3e:cb:98:b5", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea5a48c-97", "ovs_interfaceid": 
"fea5a48c-97b0-4aa5-b402-a421e0392f74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.591624] env[62503]: DEBUG oslo_vmware.api [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300908} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.591624] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.591624] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.591624] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.591624] env[62503]: INFO nova.compute.manager [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Took 1.20 seconds to destroy the instance on the hypervisor. [ 900.591624] env[62503]: DEBUG oslo.service.loopingcall [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.591624] env[62503]: DEBUG nova.compute.manager [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 900.591624] env[62503]: DEBUG nova.network.neutron [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.615530] env[62503]: DEBUG nova.compute.manager [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Received event network-vif-plugged-fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 900.616391] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Acquiring lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.616769] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.617378] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.617787] env[62503]: DEBUG nova.compute.manager [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] No waiting events found dispatching network-vif-plugged-fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 900.618140] env[62503]: WARNING nova.compute.manager [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Received unexpected event network-vif-plugged-fea5a48c-97b0-4aa5-b402-a421e0392f74 for instance with vm_state building and task_state spawning. [ 900.618474] env[62503]: DEBUG nova.compute.manager [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Received event network-changed-fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 900.618783] env[62503]: DEBUG nova.compute.manager [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Refreshing instance network info cache due to event network-changed-fea5a48c-97b0-4aa5-b402-a421e0392f74. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 900.619593] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Acquiring lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.655720] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387891, 'name': Rename_Task, 'duration_secs': 0.222799} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.658383] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.658774] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca14f520-8aa6-46cc-a846-4ccbe87dfbe0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.665739] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 900.665739] env[62503]: value = "task-1387892" [ 900.665739] env[62503]: _type = "Task" [ 900.665739] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.673804] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.693943] env[62503]: INFO nova.compute.manager [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Took 17.72 seconds to build instance. 
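Interleaved with the task polling, the log shows oslo_concurrency.lockutils bookkeeping: "Acquiring lock ...", "acquired ... waited X.XXXs", "released ... held X.XXXs" for the instance UUID itself, its "<uuid>-events" lock, and the "refresh_cache-<uuid>" lock. The snippet below sketches the two lockutils idioms behind those records: the decorator form that serializes work on one instance within a worker process, and the context-manager form used around the network-info cache refresh. The lock names are copied from the log only for illustration; this is a simplified stand-in, not Nova's actual code, and it assumes the oslo.concurrency package is installed.

```python
import time

from oslo_concurrency import lockutils

# Instance UUID taken from the log, used here only as an example lock name.
INSTANCE_UUID = "e7556915-634f-40d6-9e7f-da1c3201d8e4"


@lockutils.synchronized(INSTANCE_UUID)
def build_instance():
    """Decorator form: concurrent calls in this process run one at a time."""
    time.sleep(0.1)  # stand-in for the build/spawn work


def refresh_network_cache():
    # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
    with lockutils.lock(f"refresh_cache-{INSTANCE_UUID}"):
        time.sleep(0.05)  # stand-in for the Neutron port refresh


if __name__ == "__main__":
    build_instance()
    refresh_network_cache()
```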
[ 900.903396] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Preparing fetch location {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 900.903672] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Fetch image to [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce/OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce.vmdk {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 900.903868] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Downloading stream optimized image 3306da0e-1d43-4f11-be11-5fe7cf1194eb to [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce/OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce.vmdk on the data store datastore1 as vApp {{(pid=62503) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 900.904850] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Downloading image file data 3306da0e-1d43-4f11-be11-5fe7cf1194eb to the ESX as VM named 'OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce' {{(pid=62503) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 900.980948] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.291s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.981187] env[62503]: INFO nova.compute.manager [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Migrating [ 900.981444] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.981621] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.990210] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 900.990210] env[62503]: value = "resgroup-9" [ 900.990210] env[62503]: _type = 
"ResourcePool" [ 900.990210] env[62503]: }. {{(pid=62503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 900.990752] env[62503]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-940992e7-9bb7-45c6-ade0-23e565a35ea4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.016267] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lease: (returnval){ [ 901.016267] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 901.016267] env[62503]: _type = "HttpNfcLease" [ 901.016267] env[62503]: } obtained for vApp import into resource pool (val){ [ 901.016267] env[62503]: value = "resgroup-9" [ 901.016267] env[62503]: _type = "ResourcePool" [ 901.016267] env[62503]: }. {{(pid=62503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 901.016585] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the lease: (returnval){ [ 901.016585] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 901.016585] env[62503]: _type = "HttpNfcLease" [ 901.016585] env[62503]: } to be ready. {{(pid=62503) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 901.026386] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 901.026386] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 901.026386] env[62503]: _type = "HttpNfcLease" [ 901.026386] env[62503]: } is initializing. 
{{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 901.042866] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.043069] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Instance network_info: |[{"id": "fea5a48c-97b0-4aa5-b402-a421e0392f74", "address": "fa:16:3e:cb:98:b5", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea5a48c-97", "ovs_interfaceid": "fea5a48c-97b0-4aa5-b402-a421e0392f74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 901.043405] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Acquired lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.043594] env[62503]: DEBUG nova.network.neutron [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Refreshing network info cache for port fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.045229] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:98:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fea5a48c-97b0-4aa5-b402-a421e0392f74', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.052762] env[62503]: DEBUG oslo.service.loopingcall [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 
tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.055677] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.056207] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a57e744-0e52-45d4-890c-8a0693d15214 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.075716] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.075716] env[62503]: value = "task-1387894" [ 901.075716] env[62503]: _type = "Task" [ 901.075716] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.084063] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387894, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.177033] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387892, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.197370] env[62503]: DEBUG oslo_concurrency.lockutils [None req-784ff18d-15f2-4cb8-8623-ff87e0cf2977 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.233s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.256583] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.256900] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.257228] env[62503]: INFO nova.compute.manager [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Rebooting instance [ 901.338809] env[62503]: DEBUG nova.network.neutron [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Updated VIF entry in instance network info cache for port 
fea5a48c-97b0-4aa5-b402-a421e0392f74. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.339199] env[62503]: DEBUG nova.network.neutron [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Updating instance_info_cache with network_info: [{"id": "fea5a48c-97b0-4aa5-b402-a421e0392f74", "address": "fa:16:3e:cb:98:b5", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea5a48c-97", "ovs_interfaceid": "fea5a48c-97b0-4aa5-b402-a421e0392f74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.408463] env[62503]: DEBUG nova.network.neutron [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.484221] env[62503]: INFO nova.compute.rpcapi [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 901.484904] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.529136] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 901.529136] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 901.529136] env[62503]: _type = "HttpNfcLease" [ 901.529136] env[62503]: } is initializing. {{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 901.588171] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387894, 'name': CreateVM_Task, 'duration_secs': 0.414509} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.588426] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 901.589568] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.589743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.590316] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 901.590645] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d54f962-a82a-404f-b727-357bcc53499c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.595773] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 901.595773] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521afd62-3255-5220-564f-a15eb1591cd9" [ 901.595773] env[62503]: _type = "Task" [ 901.595773] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.605387] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521afd62-3255-5220-564f-a15eb1591cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.679226] env[62503]: DEBUG oslo_vmware.api [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387892, 'name': PowerOnVM_Task, 'duration_secs': 0.608903} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.679845] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.680170] env[62503]: INFO nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Took 7.37 seconds to spawn the instance on the hypervisor. [ 901.680536] env[62503]: DEBUG nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 901.681481] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70ae70d-0893-4887-8d69-189599e0f73f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.782969] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.783226] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquired lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.783454] env[62503]: DEBUG nova.network.neutron [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.842457] env[62503]: DEBUG oslo_concurrency.lockutils [req-ca087d18-3f3c-41ad-bb0e-4676dcb51871 req-8515511c-8cb8-4c40-9519-b77ceca4833e service nova] Releasing lock "refresh_cache-e7556915-634f-40d6-9e7f-da1c3201d8e4" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.912250] env[62503]: INFO nova.compute.manager [-] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Took 1.32 seconds to deallocate network for instance. 
[ 902.007763] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.007952] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.008150] env[62503]: DEBUG nova.network.neutron [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.029850] env[62503]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 902.029850] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 902.029850] env[62503]: _type = "HttpNfcLease" [ 902.029850] env[62503]: } is ready. {{(pid=62503) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 902.034026] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 902.034026] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5272feab-2f18-7e8e-8464-0e76e6542452" [ 902.034026] env[62503]: _type = "HttpNfcLease" [ 902.034026] env[62503]: }. {{(pid=62503) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 902.034026] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c9b39b-2647-471f-9856-fbfe96bfbf09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.041269] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk from lease info. {{(pid=62503) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 902.041269] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk. 
{{(pid=62503) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 902.119313] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4c2f2f2-012f-447e-b6c6-d26730beae02 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.121094] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521afd62-3255-5220-564f-a15eb1591cd9, 'name': SearchDatastore_Task, 'duration_secs': 0.01681} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.123224] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.123224] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.123859] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.123859] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.123859] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.124356] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa52307c-c7c0-48f6-a00c-124efb7e8424 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.134162] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
902.134373] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.135133] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63a085e9-93dc-49db-9f26-ddeb15d2f73d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.140651] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 902.140651] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ec44b7-ee8b-ac05-a832-7521f256e79d" [ 902.140651] env[62503]: _type = "Task" [ 902.140651] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.148881] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ec44b7-ee8b-ac05-a832-7521f256e79d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.200032] env[62503]: INFO nova.compute.manager [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Took 18.98 seconds to build instance. 
[ 902.419437] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.421819] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.421819] env[62503]: DEBUG nova.objects.instance [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid ca1f1966-bfe1-495e-b055-f72150f72470 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.525240] env[62503]: DEBUG nova.network.neutron [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updating instance_info_cache with network_info: [{"id": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "address": "fa:16:3e:3b:38:32", "network": {"id": "082037b2-ad22-4d1a-84b7-a53daf45a565", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1411425723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f28a2b1084de4f3eafb800bafb0ab68c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad311052-6c", "ovs_interfaceid": "ad311052-6c0b-4b0a-ad92-0d1198dbad4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.652783] env[62503]: DEBUG nova.compute.manager [req-e3889a49-8121-4742-b588-b210a14149a0 req-b97fc3ad-7a47-4d72-ab6b-51a521c715d1 service nova] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Received event network-vif-deleted-02a00a1f-6723-41e6-b91a-5e79048551ce {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 902.656247] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ec44b7-ee8b-ac05-a832-7521f256e79d, 'name': SearchDatastore_Task, 'duration_secs': 0.013607} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.658689] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4824aea-0d92-4d40-873f-fa6ddfedb1bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.671709] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 902.671709] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286a99e-2ef4-6bc1-17ad-88a114ded738" [ 902.671709] env[62503]: _type = "Task" [ 902.671709] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.682931] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286a99e-2ef4-6bc1-17ad-88a114ded738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.703491] env[62503]: DEBUG oslo_concurrency.lockutils [None req-30e8662c-df08-452b-b0da-6a8680594759 tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.494s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.775994] env[62503]: DEBUG nova.network.neutron [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.028407] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa 
tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Releasing lock "refresh_cache-c39e7ee3-1b97-44ec-92d6-733976c0f0f8" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.031029] env[62503]: DEBUG nova.compute.manager [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 903.031897] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21995391-b03a-4d42-88cb-fac42578f9e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.114257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0889b9-643b-4a1a-bb88-c200d0556ca6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.124553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f7a4a1-d3e8-479a-a87e-dbc0d5baa49f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.160237] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626aa854-8388-4d57-a4a2-447ad5a55383 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.169929] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407fd48a-3ba7-4c82-ac80-bb9a4d6806ad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.188118] env[62503]: DEBUG nova.compute.provider_tree [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.198027] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286a99e-2ef4-6bc1-17ad-88a114ded738, 'name': SearchDatastore_Task, 'duration_secs': 0.014456} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.198027] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.198027] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] e7556915-634f-40d6-9e7f-da1c3201d8e4/e7556915-634f-40d6-9e7f-da1c3201d8e4.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.198027] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eec7aeed-03c8-4e19-b722-9f6f6ede8530 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.202670] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 903.202670] env[62503]: value = "task-1387895" [ 903.202670] env[62503]: _type = "Task" [ 903.202670] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.212821] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387895, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.252098] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Completed reading data from the image iterator. {{(pid=62503) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 903.252384] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 903.253394] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaa4663-17c6-4327-a4e3-616ef96bc6e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.260546] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk is in state: ready. 
{{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 903.260748] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 903.260995] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d9208a7f-8347-499a-8d10-39f87c652808 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.279469] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.564647] env[62503]: DEBUG oslo_vmware.rw_handles [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f009c6-ea4c-2c56-d3df-7093b57759b4/disk-0.vmdk. {{(pid=62503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 903.564941] env[62503]: INFO nova.virt.vmwareapi.images [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Downloaded image file data 3306da0e-1d43-4f11-be11-5fe7cf1194eb [ 903.565785] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a392ffba-89de-4a75-938e-933f47c5746b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.585661] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-896580a8-8e50-42d6-8ee4-f71ea32271e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.615519] env[62503]: INFO nova.virt.vmwareapi.images [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] The imported VM was unregistered [ 903.617245] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Caching image {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 903.617489] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating directory with path [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.617776] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e222e712-ee32-4c3a-9da3-7a43669c351c {{(pid=62503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.634417] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created directory with path [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.634576] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce/OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce.vmdk to [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk. {{(pid=62503) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 903.634875] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f8ab5d31-77bb-406d-b46f-b63718f91338 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.642253] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 903.642253] env[62503]: value = "task-1387897" [ 903.642253] env[62503]: _type = "Task" [ 903.642253] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.651060] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.696600] env[62503]: DEBUG nova.scheduler.client.report [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 903.712686] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387895, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.055031] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efad6b2-8bc2-481f-968d-14bc26afb447 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.063282] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Doing hard reboot of VM {{(pid=62503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 904.065752] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-a130d7bf-412e-4d85-9308-8c618d7d3a74 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.073255] env[62503]: DEBUG oslo_vmware.api [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 904.073255] env[62503]: value = "task-1387898" [ 904.073255] env[62503]: _type = "Task" [ 904.073255] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.082795] env[62503]: DEBUG oslo_vmware.api [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387898, 'name': ResetVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.152933] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.203704] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.782s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.221455] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633193} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.224116] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] e7556915-634f-40d6-9e7f-da1c3201d8e4/e7556915-634f-40d6-9e7f-da1c3201d8e4.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.225116] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.227643] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3325d3e1-9327-4507-8704-d409d26d3db7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.240153] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 904.240153] env[62503]: value = "task-1387899" [ 904.240153] env[62503]: _type = "Task" [ 904.240153] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.247607] env[62503]: INFO nova.scheduler.client.report [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance ca1f1966-bfe1-495e-b055-f72150f72470 [ 904.261343] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387899, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.587124] env[62503]: DEBUG oslo_vmware.api [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387898, 'name': ResetVM_Task, 'duration_secs': 0.226101} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.587124] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Did hard reboot of VM {{(pid=62503) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 904.587124] env[62503]: DEBUG nova.compute.manager [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 904.587500] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6eab147-2084-45e8-9a96-a510a68bf4ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.653897] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.748411] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104987} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.748721] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.749667] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fcb120-99ee-4ac4-83e5-7eec8ce396b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.775929] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] e7556915-634f-40d6-9e7f-da1c3201d8e4/e7556915-634f-40d6-9e7f-da1c3201d8e4.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.776713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36110b9f-d2a9-492b-9f90-ecb7dabac9c6 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "ca1f1966-bfe1-495e-b055-f72150f72470" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.397s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.778106] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-11ec9572-d4be-41a2-9e74-614f7955ce21 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.798101] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90159e5f-606b-4757-8a80-fa61b526b437 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.803898] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 904.803898] env[62503]: value = "task-1387900" [ 904.803898] env[62503]: _type = "Task" [ 904.803898] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.821277] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 0 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.830641] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387900, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.105328] env[62503]: DEBUG oslo_concurrency.lockutils [None req-40564cc9-512f-4e59-ad26-96223ac8f6aa tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.848s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.156485] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.315773] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387900, 'name': ReconfigVM_Task, 'duration_secs': 0.387309} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.316305] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Reconfigured VM instance instance-00000050 to attach disk [datastore2] e7556915-634f-40d6-9e7f-da1c3201d8e4/e7556915-634f-40d6-9e7f-da1c3201d8e4.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.317218] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64cfc1fe-d350-40ed-844d-a3dccc9b54c1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.327571] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 905.327571] env[62503]: value = "task-1387901" [ 905.327571] env[62503]: _type = "Task" [ 905.327571] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.331920] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.332423] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b45a6200-3429-4d70-901d-3714a2dc5705 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.339843] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387901, 'name': Rename_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.342122] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 905.342122] env[62503]: value = "task-1387902" [ 905.342122] env[62503]: _type = "Task" [ 905.342122] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.352346] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.658157] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.837650] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387901, 'name': Rename_Task, 'duration_secs': 0.168095} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.838018] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.838312] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9857e4d-942e-4eb2-95eb-4b18191931aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.848793] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 905.848793] env[62503]: value = "task-1387903" [ 905.848793] env[62503]: _type = "Task" [ 905.848793] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.856424] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387902, 'name': PowerOffVM_Task, 'duration_secs': 0.294488} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.857286] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.857489] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 17 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 905.871038] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387903, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.926059] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.926059] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.163201] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.210538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.210864] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.211098] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.211672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.211672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.213916] env[62503]: INFO nova.compute.manager [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Terminating instance [ 906.216264] env[62503]: DEBUG nova.compute.manager [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 906.216469] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.217336] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c69825-a85a-40d8-800e-6d4e96617c91 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.225549] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.225802] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e476e20-4b44-4945-a52f-b2222c600fa7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.233122] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 906.233122] env[62503]: value = "task-1387904" [ 906.233122] env[62503]: _type = "Task" [ 906.233122] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.241975] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.362693] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387903, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.374349] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.374655] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.374780] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.374974] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.375137] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.375291] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.375506] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.375659] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.375827] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 
tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.376458] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.376458] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.381707] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23db0bd5-1e83-436f-8807-1cd2e5ffe5e9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.398868] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 906.398868] env[62503]: value = "task-1387905" [ 906.398868] env[62503]: _type = "Task" [ 906.398868] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.408015] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387905, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.426728] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 906.658966] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387897, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.661315} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.659369] env[62503]: INFO nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce/OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce.vmdk to [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk. 
[ 906.659472] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Cleaning up location [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 906.659609] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_95a03411-c0a6-48d9-9cd9-1e3e266f3bce {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.659857] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a79589ab-b419-46d2-97fd-0c560aa8ac74 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.665461] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 906.665461] env[62503]: value = "task-1387906" [ 906.665461] env[62503]: _type = "Task" [ 906.665461] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.674305] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.744708] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387904, 'name': PowerOffVM_Task, 'duration_secs': 0.326169} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.745018] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.745204] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.745714] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d865e7c2-c2ca-41ad-b06c-40f24a060d14 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.820067] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.820363] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.820612] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Deleting the datastore file [datastore1] c39e7ee3-1b97-44ec-92d6-733976c0f0f8 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.820954] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be176bae-f4e8-4a22-8470-b0009a2e659b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.828787] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for the task: (returnval){ [ 906.828787] env[62503]: value = "task-1387908" [ 906.828787] env[62503]: _type = "Task" [ 906.828787] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.836911] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.837159] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.841994] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.859875] env[62503]: DEBUG oslo_vmware.api [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387903, 'name': PowerOnVM_Task, 'duration_secs': 0.563327} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.860174] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.860384] env[62503]: INFO nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Took 7.97 seconds to spawn the instance on the hypervisor. [ 906.860623] env[62503]: DEBUG nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 906.861391] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e44929-c70f-44cc-909a-f075d0de6915 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.909012] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387905, 'name': ReconfigVM_Task, 'duration_secs': 0.15309} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.909930] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 33 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 906.953115] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.953444] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.955071] env[62503]: INFO nova.compute.claims [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.176903] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114842} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.177337] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.177590] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.177872] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk to [datastore1] c09488ed-e354-4abf-8999-b2f8afec44fc/c09488ed-e354-4abf-8999-b2f8afec44fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.178160] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41de25b6-5029-423d-a759-993579dfe2f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.185177] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 907.185177] env[62503]: value = "task-1387909" [ 907.185177] env[62503]: _type = "Task" [ 907.185177] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.197340] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.338527] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.343110] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 907.379910] env[62503]: INFO nova.compute.manager [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Took 23.47 seconds to build instance. 
[ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.417844] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.418348] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 907.418348] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.418497] env[62503]: DEBUG nova.virt.hardware [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.424497] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfiguring VM instance instance-00000049 to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 907.425406] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c2068f5-d2cb-419f-a514-d4a9779c7b8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.444030] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 907.444030] env[62503]: value = "task-1387910" [ 907.444030] env[62503]: _type = "Task" [ 907.444030] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.453814] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.696048] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.839942] env[62503]: DEBUG oslo_vmware.api [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Task: {'id': task-1387908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.735618} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.840152] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.840370] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.840583] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.840806] env[62503]: INFO nova.compute.manager [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Took 1.62 seconds to destroy the instance on the hypervisor. [ 907.841146] env[62503]: DEBUG oslo.service.loopingcall [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.841368] env[62503]: DEBUG nova.compute.manager [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 907.841466] env[62503]: DEBUG nova.network.neutron [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.865809] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.881353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-085a834d-69f1-4b6f-a2c6-d0f37cb5d14b tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.984s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.955977] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387910, 'name': ReconfigVM_Task, 'duration_secs': 0.164405} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.956673] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfigured VM instance instance-00000049 to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 907.957873] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0134244-d20c-4981-b22e-debfcad24a28 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.004011] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.004255] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25a37500-987f-4d79-b66a-3541efcacccd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.031403] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 908.031403] env[62503]: value = "task-1387911" [ 908.031403] env[62503]: _type = "Task" [ 908.031403] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.042325] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387911, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.201022] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.209217] env[62503]: DEBUG nova.compute.manager [req-02497162-d2ff-44c5-b9a2-e082024b191a req-c4459dfa-7e0d-4087-bd8a-61438202ed53 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Received event network-vif-deleted-ad311052-6c0b-4b0a-ad92-0d1198dbad4c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 908.209217] env[62503]: INFO nova.compute.manager [req-02497162-d2ff-44c5-b9a2-e082024b191a req-c4459dfa-7e0d-4087-bd8a-61438202ed53 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Neutron deleted interface ad311052-6c0b-4b0a-ad92-0d1198dbad4c; detaching it from the instance and deleting it from the info cache [ 908.209217] env[62503]: DEBUG nova.network.neutron [req-02497162-d2ff-44c5-b9a2-e082024b191a req-c4459dfa-7e0d-4087-bd8a-61438202ed53 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.268532] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef04090f-2756-4e88-8887-fa4f30ac99e9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.278073] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6c8985-39a8-49ce-ae37-e2ea3e609a9d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.317311] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82e0de0-8ea3-4f17-ba5c-102c50723d34 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.325854] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3591d18-bb5e-44d5-b957-3057d7ac674b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.341589] env[62503]: DEBUG nova.compute.provider_tree [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.547030] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387911, 'name': ReconfigVM_Task, 'duration_secs': 0.346491} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.547030] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Reconfigured VM instance instance-00000049 to attach disk [datastore2] c9129f68-c755-4b78-b067-b77b01048c02/c9129f68-c755-4b78-b067-b77b01048c02.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.547288] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 50 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.686423] env[62503]: DEBUG nova.network.neutron [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.697863] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.725025] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56eed01c-8b46-4c5f-9e18-7844829943b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.734377] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7867646-5a37-4bea-bf8c-450e27e21ea0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.764177] env[62503]: DEBUG nova.compute.manager [req-02497162-d2ff-44c5-b9a2-e082024b191a req-c4459dfa-7e0d-4087-bd8a-61438202ed53 service nova] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Detach interface failed, port_id=ad311052-6c0b-4b0a-ad92-0d1198dbad4c, reason: Instance c39e7ee3-1b97-44ec-92d6-733976c0f0f8 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 908.845308] env[62503]: DEBUG nova.scheduler.client.report [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 909.056741] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df973f28-a08c-419d-bcde-7cee45e04f48 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.081222] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f5f352-2cdc-427a-8489-b1c465a78332 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.100820] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 67 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 909.194587] env[62503]: INFO nova.compute.manager [-] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Took 1.35 seconds to deallocate network for instance. [ 909.205588] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.351590] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.351964] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 909.355597] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.490s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.357354] env[62503]: INFO nova.compute.claims [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.372275] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.373420] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a84a8-02f6-41f6-9348-4d7935063e94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.380991] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk is in state: ready. {{(pid=62503) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.381216] env[62503]: ERROR oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk due to incomplete transfer. [ 909.381427] env[62503]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9b36c01c-5213-409f-ac33-6a774f49337d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.389998] env[62503]: DEBUG oslo_vmware.rw_handles [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528e925a-2128-e88f-9716-9f3cc2b7bf3b/disk-0.vmdk. 
{{(pid=62503) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 909.389998] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Uploaded image 7cbf51d8-1806-40f1-8e00-1394fe0bcfc8 to the Glance image server {{(pid=62503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 909.392559] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Destroying the VM {{(pid=62503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 909.393684] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e565eb04-718d-4764-bf9e-b97266b84b4c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.400928] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 909.400928] env[62503]: value = "task-1387912" [ 909.400928] env[62503]: _type = "Task" [ 909.400928] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.409806] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387912, 'name': Destroy_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.468094] env[62503]: DEBUG oslo_concurrency.lockutils [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.468469] env[62503]: DEBUG oslo_concurrency.lockutils [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.468742] env[62503]: DEBUG nova.compute.manager [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 909.469702] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f8e144-b359-4723-8092-0878620efd61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.477157] env[62503]: DEBUG nova.compute.manager [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3375}} [ 909.477883] env[62503]: DEBUG nova.objects.instance [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'flavor' on Instance uuid 629054bb-8fdb-45a2-8c07-216c4104d4a6 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.653943] env[62503]: DEBUG nova.network.neutron [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Port 792806f8-f6ea-4abd-9085-2a2ce83df26b binding to destination host cpu-1 is already ACTIVE {{(pid=62503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 909.698648] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.707478] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.862013] env[62503]: DEBUG nova.compute.utils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 909.865376] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 909.865600] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.906510] env[62503]: DEBUG nova.policy [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 909.913579] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387912, 'name': Destroy_Task} progress is 33%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.140181] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Successfully created port: a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.210007] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387909, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.597725} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.211896] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3306da0e-1d43-4f11-be11-5fe7cf1194eb/3306da0e-1d43-4f11-be11-5fe7cf1194eb.vmdk to [datastore1] c09488ed-e354-4abf-8999-b2f8afec44fc/c09488ed-e354-4abf-8999-b2f8afec44fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.213215] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190389ed-c7be-4627-a374-ba3dce04e631 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.238943] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] c09488ed-e354-4abf-8999-b2f8afec44fc/c09488ed-e354-4abf-8999-b2f8afec44fc.vmdk or device None with type streamOptimized {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.239625] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9ed8823-24c2-4f6a-bc27-da5482817fac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.259921] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 910.259921] env[62503]: value = "task-1387913" [ 910.259921] env[62503]: _type = "Task" [ 910.259921] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.268118] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387913, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.367103] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 910.411343] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387912, 'name': Destroy_Task} progress is 33%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.485131] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.485459] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be87ec50-179c-45de-879f-d3caae38bec2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.493876] env[62503]: DEBUG oslo_vmware.api [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 910.493876] env[62503]: value = "task-1387914" [ 910.493876] env[62503]: _type = "Task" [ 910.493876] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.501196] env[62503]: DEBUG oslo_vmware.api [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.552532] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94ea91b-e95e-418e-8534-b7be4d1dffbd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.559913] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012cb814-ae30-451c-a34d-8d8257a65ee0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.590888] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb31d8f5-3e0a-427d-aae0-3defeeac1d7c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.598820] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c73ee45-b494-40b8-b39d-7260a28e3e02 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.612013] env[62503]: DEBUG nova.compute.provider_tree [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.675286] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.675584] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.675702] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.769842] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.912229] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387912, 'name': Destroy_Task, 'duration_secs': 1.325874} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.912469] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Destroyed the VM [ 910.912787] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Deleting Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 910.913111] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4616ecda-600a-4aed-9201-2cc928950384 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.920357] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 910.920357] env[62503]: value = "task-1387915" [ 910.920357] env[62503]: _type = "Task" [ 910.920357] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.928063] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387915, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.007445] env[62503]: DEBUG oslo_vmware.api [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387914, 'name': PowerOffVM_Task, 'duration_secs': 0.345767} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.007736] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.007946] env[62503]: DEBUG nova.compute.manager [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 911.008890] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b54e63-809e-413a-b91d-be8ed889b7ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.116057] env[62503]: DEBUG nova.scheduler.client.report [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 911.270120] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387913, 'name': ReconfigVM_Task, 'duration_secs': 0.825415} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.270416] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Reconfigured VM instance instance-0000004f to attach disk [datastore1] c09488ed-e354-4abf-8999-b2f8afec44fc/c09488ed-e354-4abf-8999-b2f8afec44fc.vmdk or device None with type streamOptimized {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.271130] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbbed280-4e71-4621-80a3-131455260070 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.277619] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 911.277619] env[62503]: value = "task-1387916" [ 911.277619] env[62503]: _type = "Task" [ 911.277619] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.285158] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387916, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.379687] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 911.407874] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.408159] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.408328] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.408549] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.408737] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.408897] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.409132] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.409288] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.409471] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.409632] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.409811] env[62503]: DEBUG nova.virt.hardware [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.410834] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e25d11-4b60-47d4-b43f-0ae5a23e1351 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.418977] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8004d771-580d-4786-b4cb-8b047d8ee843 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.441750] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387915, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.525926] env[62503]: DEBUG oslo_concurrency.lockutils [None req-37eb9674-d233-4b69-a902-37260e3cf21d tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.620887] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.621793] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 911.625201] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.917s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.625201] env[62503]: DEBUG nova.objects.instance [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lazy-loading 'resources' on Instance uuid c39e7ee3-1b97-44ec-92d6-733976c0f0f8 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.734691] env[62503]: DEBUG nova.compute.manager [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Received event network-vif-plugged-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 911.734915] env[62503]: DEBUG oslo_concurrency.lockutils [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] Acquiring lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.735146] env[62503]: DEBUG oslo_concurrency.lockutils [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.735318] env[62503]: DEBUG oslo_concurrency.lockutils [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.735484] env[62503]: DEBUG nova.compute.manager [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] No waiting events found dispatching network-vif-plugged-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.735649] env[62503]: WARNING nova.compute.manager [req-2a7dddd5-4139-4a29-a0cf-957a9d00b941 req-5309a3af-3fea-422d-b439-c3733e6bbbd8 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Received unexpected event network-vif-plugged-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 for instance with vm_state building and task_state spawning. 
[ 911.745031] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.745216] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.745404] env[62503]: DEBUG nova.network.neutron [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.790256] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387916, 'name': Rename_Task, 'duration_secs': 0.341406} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.790256] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.790256] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dedc26f2-686b-4536-815b-2bb79ad5b278 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.797120] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 911.797120] env[62503]: value = "task-1387917" [ 911.797120] env[62503]: _type = "Task" [ 911.797120] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.805059] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.937730] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387915, 'name': RemoveSnapshot_Task, 'duration_secs': 0.877248} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.938157] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Deleted Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 911.938569] env[62503]: DEBUG nova.compute.manager [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 911.939702] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32adf021-1ef5-4334-8f12-5367b2157a32 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.128091] env[62503]: DEBUG nova.compute.utils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.132909] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 912.133110] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.170431] env[62503]: DEBUG nova.policy [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '522e148fb677481b8d49997d0f1f97ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53477edca58c448aaada8c1b6d65b070', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 912.174211] env[62503]: DEBUG nova.objects.instance [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'flavor' on Instance uuid 629054bb-8fdb-45a2-8c07-216c4104d4a6 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.206660] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Successfully updated port: a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
912.231684] env[62503]: DEBUG nova.compute.manager [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Received event network-changed-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 912.232083] env[62503]: DEBUG nova.compute.manager [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Refreshing instance network info cache due to event network-changed-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 912.232083] env[62503]: DEBUG oslo_concurrency.lockutils [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] Acquiring lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.232265] env[62503]: DEBUG oslo_concurrency.lockutils [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] Acquired lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.232395] env[62503]: DEBUG nova.network.neutron [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Refreshing network info cache for port a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.309145] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387917, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.317018] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d58f3f-233f-47ad-9b04-42a8a8e8b887 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.324575] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96982b46-ee6f-4acd-b866-c61a7e898570 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.356605] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8620f7-cabe-4c67-84d4-8c39a5597b6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.363387] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e891ccc7-649e-40b8-8c4d-ff4a3b86e599 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.376993] env[62503]: DEBUG nova.compute.provider_tree [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.446776] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Successfully created port: 21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.460665] env[62503]: INFO nova.compute.manager [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Shelve offloading [ 912.632668] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 912.681166] env[62503]: DEBUG oslo_concurrency.lockutils [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.681166] env[62503]: DEBUG oslo_concurrency.lockutils [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquired lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.681504] env[62503]: DEBUG nova.network.neutron [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.681676] env[62503]: DEBUG nova.objects.instance [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'info_cache' on Instance uuid 629054bb-8fdb-45a2-8c07-216c4104d4a6 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.695209] env[62503]: DEBUG nova.network.neutron [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.712699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.764137] 
env[62503]: DEBUG nova.network.neutron [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.807380] env[62503]: DEBUG oslo_vmware.api [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387917, 'name': PowerOnVM_Task, 'duration_secs': 0.814028} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.807675] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.807838] env[62503]: INFO nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Took 16.20 seconds to spawn the instance on the hypervisor. [ 912.808030] env[62503]: DEBUG nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 912.808827] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1603efa1-cd50-45dd-a6bf-d03088c99841 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.879012] env[62503]: DEBUG nova.network.neutron [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.880950] env[62503]: DEBUG nova.scheduler.client.report [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 912.966461] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.966794] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fdc388a-6013-466a-a32f-4cd8bd171050 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.975643] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 912.975643] env[62503]: value = "task-1387918" [ 912.975643] env[62503]: _type = "Task" [ 912.975643] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.983286] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.185265] env[62503]: DEBUG nova.objects.base [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Object Instance<629054bb-8fdb-45a2-8c07-216c4104d4a6> lazy-loaded attributes: flavor,info_cache {{(pid=62503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 913.197824] env[62503]: DEBUG oslo_concurrency.lockutils [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.327068] env[62503]: INFO nova.compute.manager [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Took 29.59 seconds to build instance. 
[ 913.385861] env[62503]: DEBUG oslo_concurrency.lockutils [req-08641018-a6a4-4f42-b663-0328fc35e4f5 req-1fec1c78-4a12-4c86-8a25-247099e29273 service nova] Releasing lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.386726] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.389104] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.389323] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.414475] env[62503]: INFO nova.scheduler.client.report [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Deleted allocations for instance c39e7ee3-1b97-44ec-92d6-733976c0f0f8 [ 913.488406] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 913.488793] env[62503]: DEBUG nova.compute.manager [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 913.489743] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a5f96f-737f-486d-9f54-d8db13d6c067 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.495970] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.496179] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.496351] env[62503]: DEBUG nova.network.neutron [None 
req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.644097] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 913.669259] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.669531] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.669672] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.669857] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.670012] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.670170] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.670381] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.670542] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.670749] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.670915] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.671113] env[62503]: DEBUG nova.virt.hardware [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.671963] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7d47df-44e5-4b80-84ea-0b9182a33c76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.679651] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84cecf6-84ed-4cfd-8f29-c681e270f42e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.721126] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e5312f-7e2b-4b45-8f1a-65baa4a0ae6d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.741450] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bd9282-e0a4-4122-bfbe-71cc57001ee4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.748921] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 83 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 913.829177] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2026f2a3-ca55-406d-af72-4e933a561068 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.101s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.923323] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4ff360a7-ca69-4732-812f-d6e86d096df9 
tempest-InstanceActionsTestJSON-257954437 tempest-InstanceActionsTestJSON-257954437-project-member] Lock "c39e7ee3-1b97-44ec-92d6-733976c0f0f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.712s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.946063] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.960902] env[62503]: DEBUG nova.network.neutron [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updating instance_info_cache with network_info: [{"id": "269d53b9-4861-484d-bdac-553a7fafc310", "address": "fa:16:3e:02:be:62", "network": {"id": "db32bcf6-4f31-49a6-a030-459c7952b72c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1233897985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eac2608fc52a497f961d018c888a826f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269d53b9-48", "ovs_interfaceid": "269d53b9-4861-484d-bdac-553a7fafc310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.993680] env[62503]: DEBUG nova.compute.manager [req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Received event network-vif-plugged-21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 913.993840] env[62503]: DEBUG oslo_concurrency.lockutils [req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] Acquiring lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.994028] env[62503]: DEBUG oslo_concurrency.lockutils [req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.994348] env[62503]: DEBUG oslo_concurrency.lockutils 
[req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.994569] env[62503]: DEBUG nova.compute.manager [req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] No waiting events found dispatching network-vif-plugged-21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.994805] env[62503]: WARNING nova.compute.manager [req-375e3c10-6787-4246-8ab3-066105de692c req-b0b5640c-89d6-4bd4-8b90-1b02705d1a35 service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Received unexpected event network-vif-plugged-21de8ca1-8ec9-4ca1-8a56-68c6d367064e for instance with vm_state building and task_state spawning. [ 914.198869] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Successfully updated port: 21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.221956] env[62503]: DEBUG nova.network.neutron [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Updating instance_info_cache with network_info: [{"id": "a99ea24a-aa97-4bea-9fa6-904a83e3aaa0", "address": "fa:16:3e:aa:ea:1e", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa99ea24a-aa", "ovs_interfaceid": "a99ea24a-aa97-4bea-9fa6-904a83e3aaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.255511] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.256529] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ac61f46-e534-4e4b-93dd-b37b668e594e {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.266604] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 914.266604] env[62503]: value = "task-1387919" [ 914.266604] env[62503]: _type = "Task" [ 914.266604] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.278850] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.290208] env[62503]: DEBUG nova.network.neutron [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updating instance_info_cache with network_info: [{"id": "632bec04-d9b9-4178-bb3c-104caa5ee159", "address": "fa:16:3e:f5:45:70", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632bec04-d9", "ovs_interfaceid": "632bec04-d9b9-4178-bb3c-104caa5ee159", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.463921] env[62503]: DEBUG oslo_concurrency.lockutils [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Releasing lock "refresh_cache-629054bb-8fdb-45a2-8c07-216c4104d4a6" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.468778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "c09488ed-e354-4abf-8999-b2f8afec44fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.468778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.468778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.468778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.469033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.473537] env[62503]: INFO nova.compute.manager [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Terminating instance [ 914.477831] env[62503]: DEBUG nova.compute.manager [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 914.478123] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.479130] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a87b6aa-2e80-47cc-85ac-7dade88da6d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.487943] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.488201] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-630a2eec-947f-4a2e-9547-37e1d0989c40 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.494368] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 914.494368] env[62503]: value = "task-1387920" [ 914.494368] env[62503]: _type = "Task" [ 914.494368] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.503224] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387920, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.702736] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.702899] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquired lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.703062] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.725505] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-04054a79-70b6-409a-981f-6bf99fc3b4fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.725773] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance network_info: |[{"id": "a99ea24a-aa97-4bea-9fa6-904a83e3aaa0", "address": "fa:16:3e:aa:ea:1e", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa99ea24a-aa", "ovs_interfaceid": "a99ea24a-aa97-4bea-9fa6-904a83e3aaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 914.726293] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ea:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'a99ea24a-aa97-4bea-9fa6-904a83e3aaa0', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.734568] env[62503]: DEBUG oslo.service.loopingcall [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.736898] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.737150] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-629db41d-8b93-4a1b-bb9d-3a3785cc5836 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.756350] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.756350] env[62503]: value = "task-1387921" [ 914.756350] env[62503]: _type = "Task" [ 914.756350] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.764035] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387921, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.774932] env[62503]: DEBUG oslo_vmware.api [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387919, 'name': PowerOnVM_Task, 'duration_secs': 0.397591} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.775225] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.775415] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-55afd046-fdf9-4041-8df2-ea3cb62954c4 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance 'c9129f68-c755-4b78-b067-b77b01048c02' progress to 100 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 914.793812] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.005008] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387920, 'name': PowerOffVM_Task, 'duration_secs': 0.205965} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.005331] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.005413] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.005676] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1958b22-6244-4e5c-85b6-7dea82508b3c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.098726] env[62503]: DEBUG nova.compute.manager [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received event network-vif-unplugged-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 915.098862] env[62503]: DEBUG oslo_concurrency.lockutils [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.099104] env[62503]: DEBUG oslo_concurrency.lockutils [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.099294] env[62503]: DEBUG oslo_concurrency.lockutils [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.099443] env[62503]: DEBUG nova.compute.manager [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] No waiting events found dispatching network-vif-unplugged-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.099612] env[62503]: WARNING nova.compute.manager [req-0eb02bed-2933-4fc2-b8fe-ea79854dcf3e req-2b126834-ca01-460e-9e94-87036448bfef service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received unexpected event network-vif-unplugged-632bec04-d9b9-4178-bb3c-104caa5ee159 for instance with vm_state shelved and task_state shelving_offloading. 
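Annotation (not part of the log): the PowerOnVM_Task / PowerOffVM_Task entries above follow oslo.vmware's asynchronous task pattern — a vCenter method ending in "_Task" is invoked through the API session and the caller polls until completion, which is what produces the "Waiting for the task", "progress is 0%" and "completed successfully" lines. A minimal sketch of that flow, with placeholder credentials and a hard-coded managed object reference (none of these values come from this log):

    # Illustrative only: host, credentials and the MoRef value are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Normally the VM reference is resolved via a PropertyCollector search
    # (the RetrievePropertiesEx calls seen in the log); hard-coded here.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Start the asynchronous power-on task and block until vCenter reports
    # that it finished; wait_for_task() is the poller behind the
    # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is ..." entries.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
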
[ 915.107073] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.108311] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94d9d99-a2e6-43e8-acb6-29783abb208d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.116372] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.116617] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ca1e2f8-c51d-43b5-8d10-d5a97760bb5f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.232502] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.265979] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387921, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.466683] env[62503]: DEBUG nova.network.neutron [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Updating instance_info_cache with network_info: [{"id": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "address": "fa:16:3e:cc:12:bd", "network": {"id": "2e817e6c-800f-4570-9d1d-ab4efc1acac0", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1224122777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53477edca58c448aaada8c1b6d65b070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de8ca1-8e", "ovs_interfaceid": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.470835] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 
tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.471148] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75d635eb-22d1-406a-aeff-ec65cef41c24 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.481250] env[62503]: DEBUG oslo_vmware.api [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 915.481250] env[62503]: value = "task-1387924" [ 915.481250] env[62503]: _type = "Task" [ 915.481250] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.491508] env[62503]: DEBUG oslo_vmware.api [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.767541] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387921, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.794660] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.794660] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.794660] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleting the datastore file [datastore1] c09488ed-e354-4abf-8999-b2f8afec44fc {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.794991] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93a7418b-da9a-46fc-a387-5aba0a00fab5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.801753] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 915.801753] env[62503]: value = "task-1387925" [ 915.801753] env[62503]: _type = "Task" [ 915.801753] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.810867] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.823257] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.823257] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.823257] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleting the datastore file [datastore2] 16167e53-e45b-4b37-90c6-ab2c30ebf1aa {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.823257] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cc522a8-6b29-4095-a704-4e55e2aaa539 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.830016] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 915.830016] env[62503]: value = "task-1387926" [ 915.830016] env[62503]: _type = "Task" [ 915.830016] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.839517] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387926, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.969568] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Releasing lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.969968] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Instance network_info: |[{"id": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "address": "fa:16:3e:cc:12:bd", "network": {"id": "2e817e6c-800f-4570-9d1d-ab4efc1acac0", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1224122777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53477edca58c448aaada8c1b6d65b070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de8ca1-8e", "ovs_interfaceid": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 915.970530] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:12:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21de8ca1-8ec9-4ca1-8a56-68c6d367064e', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.979573] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Creating folder: Project (53477edca58c448aaada8c1b6d65b070). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.980439] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3747bf3-160e-4177-aa48-eac3b7ea37e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.991630] env[62503]: DEBUG oslo_vmware.api [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387924, 'name': PowerOnVM_Task, 'duration_secs': 0.40202} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.991630] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.991862] env[62503]: DEBUG nova.compute.manager [None req-64128f5f-d924-444f-a79b-4882784484bc tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 915.992512] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282b0763-29b0-4ed4-81f6-cf3eeefdf91c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.997645] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Created folder: Project (53477edca58c448aaada8c1b6d65b070) in parent group-v294540. [ 915.997906] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Creating folder: Instances. Parent ref: group-v294608. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.998718] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-228625dd-0636-4aa9-8e01-5a80cce3c065 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.013143] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Created folder: Instances in parent group-v294608. [ 916.013450] env[62503]: DEBUG oslo.service.loopingcall [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.013662] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.013952] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-318d1189-3c21-4934-bc4e-cc92b8260291 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.030851] env[62503]: DEBUG nova.compute.manager [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Received event network-changed-21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 916.031049] env[62503]: DEBUG nova.compute.manager [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Refreshing instance network info cache due to event network-changed-21de8ca1-8ec9-4ca1-8a56-68c6d367064e. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 916.031274] env[62503]: DEBUG oslo_concurrency.lockutils [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] Acquiring lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.031418] env[62503]: DEBUG oslo_concurrency.lockutils [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] Acquired lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.031591] env[62503]: DEBUG nova.network.neutron [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Refreshing network info cache for port 21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.037871] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.037871] env[62503]: value = "task-1387929" [ 916.037871] env[62503]: _type = "Task" [ 916.037871] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.047235] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387929, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.267798] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387921, 'name': CreateVM_Task, 'duration_secs': 1.32583} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.267976] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.268951] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.269144] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.269464] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.269729] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baedb401-92a3-40fe-933c-47c7a3036c9a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.275246] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 916.275246] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52559081-f2ec-9ef2-a8e2-3655a4515ce9" [ 916.275246] env[62503]: _type = "Task" [ 916.275246] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.283489] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52559081-f2ec-9ef2-a8e2-3655a4515ce9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.312334] env[62503]: DEBUG oslo_vmware.api [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153729} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.313280] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.313280] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.313430] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.314073] env[62503]: INFO nova.compute.manager [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Took 1.84 seconds to destroy the instance on the hypervisor. [ 916.314073] env[62503]: DEBUG oslo.service.loopingcall [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.314073] env[62503]: DEBUG nova.compute.manager [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 916.314235] env[62503]: DEBUG nova.network.neutron [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.340486] env[62503]: DEBUG oslo_vmware.api [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143948} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.340837] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.341050] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.341236] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.364404] env[62503]: INFO nova.scheduler.client.report [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance 16167e53-e45b-4b37-90c6-ab2c30ebf1aa [ 916.548548] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387929, 'name': CreateVM_Task, 'duration_secs': 0.508994} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.548733] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.549705] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.789111] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52559081-f2ec-9ef2-a8e2-3655a4515ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.789457] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.789919] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.790245] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.790536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.790806] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.791160] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.791522] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.792064] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c57bbbe1-0a2e-462a-8881-1b246830a325 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.794360] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4514154a-faed-4b9f-8eaf-61c02d762527 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.802145] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 
tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 916.802145] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525cf18d-21d2-89d4-2476-5c48aaab9919" [ 916.802145] env[62503]: _type = "Task" [ 916.802145] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.815154] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.815154] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.815154] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13c14701-af19-4074-b7c0-f6d217a82acf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.820073] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525cf18d-21d2-89d4-2476-5c48aaab9919, 'name': SearchDatastore_Task, 'duration_secs': 0.011515} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.820700] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.820947] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.821188] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.823221] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 916.823221] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5292d92f-4ada-c32a-8dbf-778b00f8af1e" [ 916.823221] env[62503]: _type = "Task" [ 916.823221] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.831667] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5292d92f-4ada-c32a-8dbf-778b00f8af1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.867028] env[62503]: DEBUG nova.network.neutron [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Updated VIF entry in instance network info cache for port 21de8ca1-8ec9-4ca1-8a56-68c6d367064e. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.867152] env[62503]: DEBUG nova.network.neutron [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Updating instance_info_cache with network_info: [{"id": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "address": "fa:16:3e:cc:12:bd", "network": {"id": "2e817e6c-800f-4570-9d1d-ab4efc1acac0", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1224122777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53477edca58c448aaada8c1b6d65b070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21de8ca1-8e", "ovs_interfaceid": "21de8ca1-8ec9-4ca1-8a56-68c6d367064e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.872592] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.872876] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.004s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.873127] env[62503]: DEBUG nova.objects.instance [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'resources' on Instance uuid 16167e53-e45b-4b37-90c6-ab2c30ebf1aa {{(pid=62503) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.079276] env[62503]: DEBUG nova.network.neutron [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.127349] env[62503]: DEBUG nova.compute.manager [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Received event network-changed-632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 917.127555] env[62503]: DEBUG nova.compute.manager [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Refreshing instance network info cache due to event network-changed-632bec04-d9b9-4178-bb3c-104caa5ee159. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 917.127850] env[62503]: DEBUG oslo_concurrency.lockutils [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] Acquiring lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.128355] env[62503]: DEBUG oslo_concurrency.lockutils [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] Acquired lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.128355] env[62503]: DEBUG nova.network.neutron [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Refreshing network info cache for port 632bec04-d9b9-4178-bb3c-104caa5ee159 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.336996] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5292d92f-4ada-c32a-8dbf-778b00f8af1e, 'name': SearchDatastore_Task, 'duration_secs': 0.012238} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.336996] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f1a352-372a-4864-8b87-06525869dc7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.342049] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 917.342049] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b4737f-aca2-6318-1b42-dfd3e5266926" [ 917.342049] env[62503]: _type = "Task" [ 917.342049] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.349965] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b4737f-aca2-6318-1b42-dfd3e5266926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.375802] env[62503]: DEBUG oslo_concurrency.lockutils [req-c0970a1a-c143-4bd6-8461-7e399b3dc68f req-02d39032-5bcf-4283-a05e-c3a90be3931c service nova] Releasing lock "refresh_cache-c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.377661] env[62503]: DEBUG nova.objects.instance [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'numa_topology' on Instance uuid 16167e53-e45b-4b37-90c6-ab2c30ebf1aa {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.380764] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.381014] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.381209] env[62503]: DEBUG nova.compute.manager [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Going to confirm migration 1 {{(pid=62503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5079}} [ 917.582402] env[62503]: INFO nova.compute.manager [-] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Took 1.27 seconds to deallocate network for instance. [ 917.854294] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b4737f-aca2-6318-1b42-dfd3e5266926, 'name': SearchDatastore_Task, 'duration_secs': 0.033823} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.855660] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.855969] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 04054a79-70b6-409a-981f-6bf99fc3b4fc/04054a79-70b6-409a-981f-6bf99fc3b4fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.856287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.856483] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.856707] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-675ca274-c51c-45c7-9f82-560b3829ed81 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.858751] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9358b198-6801-4213-baaf-4c15aad7f3b7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.866134] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 917.866134] env[62503]: value = "task-1387930" [ 917.866134] env[62503]: _type = "Task" [ 917.866134] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.870589] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.870795] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.872016] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-932ce025-61ed-479b-b2c9-036e377f2c5a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.877238] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.880176] env[62503]: DEBUG nova.objects.base [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Object Instance<16167e53-e45b-4b37-90c6-ab2c30ebf1aa> lazy-loaded attributes: resources,numa_topology {{(pid=62503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 917.889628] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 917.889628] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e1095-2ced-f98d-3553-13aef5a36fe2" [ 917.889628] env[62503]: _type = "Task" [ 917.889628] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.899144] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e1095-2ced-f98d-3553-13aef5a36fe2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.948147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.948345] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.948525] env[62503]: DEBUG nova.network.neutron [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.948722] env[62503]: DEBUG nova.objects.instance [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lazy-loading 'info_cache' on Instance uuid c9129f68-c755-4b78-b067-b77b01048c02 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.055762] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dc6f3d-cab0-43c7-8386-75ddebfd6d65 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.060247] env[62503]: DEBUG nova.network.neutron [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updated VIF entry in instance network info cache for port 632bec04-d9b9-4178-bb3c-104caa5ee159. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.060749] env[62503]: DEBUG nova.network.neutron [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updating instance_info_cache with network_info: [{"id": "632bec04-d9b9-4178-bb3c-104caa5ee159", "address": "fa:16:3e:f5:45:70", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": null, "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap632bec04-d9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.068415] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d3cca8-d63a-4e44-9ebe-af465e8bb99d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.101436] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.102544] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322fa68c-cc51-4088-8954-b63d7e04cef3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.110886] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e824504-aaf9-4697-bfcd-57a3816742c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.125462] env[62503]: DEBUG nova.compute.provider_tree [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.127526] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.388567] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387930, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.400948] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e1095-2ced-f98d-3553-13aef5a36fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.017583} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.401304] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dd221b2-523a-4d64-a868-b0126d42d754 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.406781] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 918.406781] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523d0fd9-145f-6e1b-3cf5-c44e47689034" [ 918.406781] env[62503]: _type = "Task" [ 918.406781] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.414977] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523d0fd9-145f-6e1b-3cf5-c44e47689034, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.563801] env[62503]: DEBUG oslo_concurrency.lockutils [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] Releasing lock "refresh_cache-16167e53-e45b-4b37-90c6-ab2c30ebf1aa" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.564211] env[62503]: DEBUG nova.compute.manager [req-279d9353-40b9-4f59-957e-286b60cf9b00 req-7cf21855-9a23-4620-8c34-8ad03dccd1cf service nova] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Received event network-vif-deleted-305fe606-9e39-4466-9423-e9168de3ec21 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 918.629149] env[62503]: DEBUG nova.scheduler.client.report [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 918.883675] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387930, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.917897] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523d0fd9-145f-6e1b-3cf5-c44e47689034, 'name': SearchDatastore_Task, 'duration_secs': 0.018724} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.918223] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.918499] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7/c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.918855] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a27d51b-7ec6-4625-ba13-c355545dc3dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.926290] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 918.926290] env[62503]: value = "task-1387931" [ 918.926290] env[62503]: _type = "Task" [ 918.926290] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.935530] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387931, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.951147] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.951400] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.951610] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.951835] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.952041] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.954061] env[62503]: INFO nova.compute.manager [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Terminating instance [ 918.957574] env[62503]: DEBUG nova.compute.manager [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 918.957812] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.958671] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed97e282-ff81-4514-ad53-e2224f79949c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.966610] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.967068] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66bc4729-8162-41c1-81a2-4667ae0a13aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.973903] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 918.973903] env[62503]: value = "task-1387932" [ 918.973903] env[62503]: _type = "Task" [ 918.973903] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.983600] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387932, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.141192] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.266s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.147034] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.045s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.147034] env[62503]: DEBUG nova.objects.instance [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lazy-loading 'resources' on Instance uuid c09488ed-e354-4abf-8999-b2f8afec44fc {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.386785] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387930, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.060884} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.387472] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 04054a79-70b6-409a-981f-6bf99fc3b4fc/04054a79-70b6-409a-981f-6bf99fc3b4fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.387472] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.388763] env[62503]: DEBUG nova.network.neutron [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [{"id": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "address": "fa:16:3e:17:08:5e", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792806f8-f6", "ovs_interfaceid": "792806f8-f6ea-4abd-9085-2a2ce83df26b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.390247] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80299c3b-1031-42e4-848e-4f8fd7ba85cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.398750] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 919.398750] env[62503]: value = "task-1387933" [ 919.398750] env[62503]: _type = "Task" [ 919.398750] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.410092] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.437250] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387931, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.484098] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387932, 'name': PowerOffVM_Task, 'duration_secs': 0.197536} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.484844] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.484844] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.484958] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2adf43db-cd83-4a16-88f3-ff09d6be66c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.550087] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.550348] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.550536] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleting the datastore file [datastore2] e7556915-634f-40d6-9e7f-da1c3201d8e4 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.550846] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fd1d8b9-188d-4d04-9619-3557838604b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.557671] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 919.557671] env[62503]: value = "task-1387935" [ 919.557671] env[62503]: _type = "Task" [ 919.557671] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.565730] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387935, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.655512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-da58a238-4e42-4269-977f-a144551c2ae1 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.975s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.656317] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.529s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.656554] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.656767] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.656942] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.659977] env[62503]: INFO nova.compute.manager [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Terminating instance [ 919.662784] env[62503]: DEBUG nova.compute.manager [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 919.663030] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.663301] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93915bc1-31f3-419c-b2c5-7232a959a0d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.672530] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a927a011-4fd3-4718-b688-430f0e6f73b4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.703266] env[62503]: WARNING nova.virt.vmwareapi.vmops [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16167e53-e45b-4b37-90c6-ab2c30ebf1aa could not be found. [ 919.703489] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.703676] env[62503]: INFO nova.compute.manager [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 919.703930] env[62503]: DEBUG oslo.service.loopingcall [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.707855] env[62503]: DEBUG nova.compute.manager [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 919.707855] env[62503]: DEBUG nova.network.neutron [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.873228] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5347858-cbe0-4ab0-833e-33fce2754a6b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.881444] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b38f54-5cd7-4079-8196-62660933c5bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.629013] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-c9129f68-c755-4b78-b067-b77b01048c02" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.629498] env[62503]: DEBUG nova.objects.instance [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lazy-loading 'migration_context' on Instance uuid c9129f68-c755-4b78-b067-b77b01048c02 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.646426] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ce6450-9378-4db2-8cf6-a08a6bdd66d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.650829] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.651124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.658694] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076322} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.666797] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.667207] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387931, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534229} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.668041] env[62503]: DEBUG oslo_vmware.api [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.051764} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.669252] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb98bd9-8eeb-4ff3-b2e7-0a8d68c688de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.672384] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7/c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.672527] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.672881] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.673050] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.673112] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.673336] env[62503]: INFO nova.compute.manager [None 
req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Took 1.72 seconds to destroy the instance on the hypervisor. [ 920.677020] env[62503]: DEBUG oslo.service.loopingcall [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.677020] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f095f115-cdf3-49cc-9a59-2c836097bad5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.677429] env[62503]: DEBUG nova.compute.manager [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 920.678050] env[62503]: DEBUG nova.network.neutron [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.680949] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31780de-4033-4a1d-a6e0-68ae69a2f32b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.702117] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 04054a79-70b6-409a-981f-6bf99fc3b4fc/04054a79-70b6-409a-981f-6bf99fc3b4fc.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.704903] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bc7d848-d607-4cd3-b8fc-41573d790637 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.719581] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 920.719581] env[62503]: value = "task-1387936" [ 920.719581] env[62503]: _type = "Task" [ 920.719581] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.728374] env[62503]: DEBUG nova.compute.provider_tree [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.734860] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 920.734860] env[62503]: value = "task-1387937" [ 920.734860] env[62503]: _type = "Task" [ 920.734860] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.737876] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387936, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.746101] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387937, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.133969] env[62503]: DEBUG nova.objects.base [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 921.135267] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7984a91-5b69-406c-8188-24cb7c0598dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.155038] env[62503]: DEBUG nova.network.neutron [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.156972] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 921.159225] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba467538-0de8-4ef8-87b4-ad68167e96ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.164529] env[62503]: DEBUG oslo_vmware.api [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 921.164529] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0ad54-9d99-82b2-073b-674228164b89" [ 921.164529] env[62503]: _type = "Task" [ 921.164529] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.172853] env[62503]: DEBUG oslo_vmware.api [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0ad54-9d99-82b2-073b-674228164b89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.234356] env[62503]: DEBUG nova.scheduler.client.report [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 921.240895] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17553} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.244195] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.245189] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516eee7c-b195-4849-8e5f-630bffb48924 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.252910] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387937, 'name': ReconfigVM_Task, 'duration_secs': 0.477442} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.262480] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 04054a79-70b6-409a-981f-6bf99fc3b4fc/04054a79-70b6-409a-981f-6bf99fc3b4fc.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.272031] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7/c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.272031] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01e5662c-44f5-489d-8b16-95315f74a1e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.273518] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-825a4f11-4542-4916-b655-e983eae3ef47 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.294384] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 921.294384] env[62503]: value = "task-1387939" [ 921.294384] env[62503]: _type = "Task" [ 921.294384] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.295728] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 921.295728] env[62503]: value = "task-1387938" [ 921.295728] env[62503]: _type = "Task" [ 921.295728] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.309090] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387938, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.309335] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387939, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.496112] env[62503]: DEBUG nova.network.neutron [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.657584] env[62503]: INFO nova.compute.manager [-] [instance: 16167e53-e45b-4b37-90c6-ab2c30ebf1aa] Took 1.95 seconds to deallocate network for instance. [ 921.675523] env[62503]: DEBUG oslo_vmware.api [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c0ad54-9d99-82b2-073b-674228164b89, 'name': SearchDatastore_Task, 'duration_secs': 0.00785} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.675837] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.680975] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.740049] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.593s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.742212] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.066s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.796244] env[62503]: INFO nova.scheduler.client.report [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted allocations for instance c09488ed-e354-4abf-8999-b2f8afec44fc [ 921.814932] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387938, 'name': Rename_Task, 'duration_secs': 0.149065} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.818876] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.819505] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387939, 'name': ReconfigVM_Task, 'duration_secs': 0.302399} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.819829] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96214624-af95-447e-a597-c80891514a9b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.822606] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Reconfigured VM instance instance-00000052 to attach disk [datastore2] c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7/c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.823409] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a4296ee-c841-4d5a-b57d-42399964583b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.831352] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 921.831352] env[62503]: value = "task-1387940" [ 921.831352] env[62503]: _type = "Task" [ 921.831352] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.832348] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 921.832348] env[62503]: value = "task-1387941" [ 921.832348] env[62503]: _type = "Task" [ 921.832348] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.843515] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.847841] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387941, 'name': Rename_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.998911] env[62503]: INFO nova.compute.manager [-] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Took 1.32 seconds to deallocate network for instance. [ 922.059032] env[62503]: DEBUG nova.compute.manager [req-c31f1764-801c-4a5d-bd78-b0c0089fb8e9 req-225a46e4-57ae-49c4-a496-056d1bacfe2c service nova] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Received event network-vif-deleted-fea5a48c-97b0-4aa5-b402-a421e0392f74 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 922.307366] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2fb1368f-596d-44a6-8222-4d18b14f4c52 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "c09488ed-e354-4abf-8999-b2f8afec44fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.839s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.345085] env[62503]: DEBUG oslo_vmware.api [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387940, 'name': PowerOnVM_Task, 'duration_secs': 0.460991} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.350840] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.351106] env[62503]: INFO nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Took 10.97 seconds to spawn the instance on the hypervisor. [ 922.351360] env[62503]: DEBUG nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 922.351949] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387941, 'name': Rename_Task, 'duration_secs': 0.162296} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.353525] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6920b8c3-db07-4c7a-a1a8-9847fd35851b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.355022] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.355554] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89048b60-2026-4367-b68c-51808a52cb82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.367400] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 922.367400] env[62503]: value = "task-1387942" [ 922.367400] env[62503]: _type = "Task" [ 922.367400] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.375671] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.419164] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4608508-6089-4ad6-8a4a-169a9180bbee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.427818] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e8fbea-3524-4238-bdf8-986bfb4354a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.458607] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfd30ac-d077-43b0-beda-35e773648c91 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.465982] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ee921f-6370-41a7-9327-093fe2de25f2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.479447] env[62503]: DEBUG nova.compute.provider_tree [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.509348] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.682144] env[62503]: DEBUG oslo_concurrency.lockutils [None req-70f4a3d0-0e39-4a9c-b39c-be16c0b4dade tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "16167e53-e45b-4b37-90c6-ab2c30ebf1aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.026s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.757327] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "9ccdc727-536e-4db8-bad4-960858254758" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.757443] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.757920] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "9ccdc727-536e-4db8-bad4-960858254758-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.758319] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.758590] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.760906] env[62503]: INFO nova.compute.manager [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Terminating instance [ 922.762968] env[62503]: DEBUG nova.compute.manager [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 922.763181] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.767019] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa18e4fb-0602-4957-b601-bb9cd3de2640 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.782334] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.782543] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7abd1db2-e29c-46f9-aa2f-66f2857646b4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.790394] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 922.790394] env[62503]: value = "task-1387943" [ 922.790394] env[62503]: _type = "Task" [ 922.790394] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.799760] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.880721] env[62503]: INFO nova.compute.manager [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Took 15.95 seconds to build instance. [ 922.886122] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387942, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.984883] env[62503]: DEBUG nova.scheduler.client.report [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 923.244512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.244713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.300250] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387943, 'name': PowerOffVM_Task, 'duration_secs': 0.211562} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.300394] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.300621] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.300943] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03f23fa4-781c-4ea9-9e46-84d95edd8989 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.360984] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.361329] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.382284] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.382513] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.382695] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleting the datastore file [datastore1] 9ccdc727-536e-4db8-bad4-960858254758 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.385753] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8f95961-f396-44d1-ad18-a5cd892d5958 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.387502] env[62503]: DEBUG oslo_vmware.api [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387942, 'name': PowerOnVM_Task, 
'duration_secs': 0.782195} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.387792] env[62503]: DEBUG oslo_concurrency.lockutils [None req-36792c89-a1e6-4e62-ac01-9fd3087c2872 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.464s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.388305] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.388509] env[62503]: INFO nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Took 9.74 seconds to spawn the instance on the hypervisor. [ 923.388691] env[62503]: DEBUG nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 923.389798] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f33fff8-4111-4f6a-b413-08fc01dac3a2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.393278] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 923.393278] env[62503]: value = "task-1387945" [ 923.393278] env[62503]: _type = "Task" [ 923.393278] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.405302] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.405534] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.405730] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.405914] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.406097] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.407713] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.408143] env[62503]: INFO nova.compute.manager [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Terminating instance [ 923.409791] env[62503]: DEBUG nova.compute.manager [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 923.409986] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.410736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295eee8c-b32e-4267-88cf-94cca6081e66 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.417261] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.417577] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-904a3801-8d31-405d-8a5e-35c6deeb81bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.423050] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 923.423050] env[62503]: value = "task-1387946" [ 923.423050] env[62503]: _type = "Task" [ 923.423050] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.431831] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387946, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.749367] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 923.863589] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 923.907958] env[62503]: DEBUG oslo_vmware.api [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159059} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.908235] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.908423] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.908619] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.910788] env[62503]: INFO nova.compute.manager [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Took 1.15 seconds to destroy the instance on the hypervisor. [ 923.910788] env[62503]: DEBUG oslo.service.loopingcall [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.910788] env[62503]: DEBUG nova.compute.manager [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 923.910942] env[62503]: DEBUG nova.network.neutron [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.912755] env[62503]: INFO nova.compute.manager [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Took 16.06 seconds to build instance. [ 923.934354] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387946, 'name': PowerOffVM_Task, 'duration_secs': 0.187665} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.934470] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.935364] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.935364] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24965db1-4971-497b-84a1-e3f65d654f26 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.994772] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.253s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.997577] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.317s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.999436] env[62503]: INFO nova.compute.claims [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.010333] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.010614] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.010761] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore2] 04054a79-70b6-409a-981f-6bf99fc3b4fc {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.011055] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aebbbc5-b103-4c9d-9a89-19d7a34f3fb5 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.017584] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 924.017584] env[62503]: value = "task-1387948" [ 924.017584] env[62503]: _type = "Task" [ 924.017584] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.028018] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.151321] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.151321] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.213066] env[62503]: DEBUG nova.compute.manager [req-f98f31b0-7284-48aa-b0db-237fe61c7fb7 req-62e8fd09-cca7-4e24-bc86-1358fe77c942 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Received event network-vif-deleted-992a63b9-4bca-47ba-abed-96804de62f70 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 924.213269] env[62503]: INFO nova.compute.manager [req-f98f31b0-7284-48aa-b0db-237fe61c7fb7 req-62e8fd09-cca7-4e24-bc86-1358fe77c942 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Neutron deleted interface 992a63b9-4bca-47ba-abed-96804de62f70; detaching it from the instance and deleting it from the info cache [ 924.213450] env[62503]: DEBUG nova.network.neutron [req-f98f31b0-7284-48aa-b0db-237fe61c7fb7 req-62e8fd09-cca7-4e24-bc86-1358fe77c942 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.273165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.381989] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.415756] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2faef68b-c1ae-4dfa-9919-ba7d26bd1753 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.578s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.527642] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.561041] env[62503]: INFO nova.scheduler.client.report [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocation for migration be380929-2664-4c9b-ad25-ae71398e93d8 [ 924.653460] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 924.687309] env[62503]: DEBUG nova.network.neutron [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.716962] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fddc879-96f3-4940-95ca-111eef16d161 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.725870] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3526cee3-177e-4bc7-9baa-980dfd9bf157 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.753878] env[62503]: DEBUG nova.compute.manager [req-f98f31b0-7284-48aa-b0db-237fe61c7fb7 req-62e8fd09-cca7-4e24-bc86-1358fe77c942 service nova] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Detach interface failed, port_id=992a63b9-4bca-47ba-abed-96804de62f70, reason: Instance 9ccdc727-536e-4db8-bad4-960858254758 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 925.027361] env[62503]: DEBUG oslo_vmware.api [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1387948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.542441} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.027625] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.027832] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.027993] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.028233] env[62503]: INFO nova.compute.manager [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Took 1.62 seconds to destroy the instance on the hypervisor. [ 925.028492] env[62503]: DEBUG oslo.service.loopingcall [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.030997] env[62503]: DEBUG nova.compute.manager [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 925.031126] env[62503]: DEBUG nova.network.neutron [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.066111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-139fd144-9ce2-4f91-87cb-15cf9d98f0c6 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.685s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.169468] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.181838] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f330a3c-d6fd-4ce6-9f20-72c77c735001 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.190100] env[62503]: INFO nova.compute.manager [-] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Took 1.28 seconds to deallocate network for 
instance. [ 925.192885] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2811c0-a9c6-493d-8b42-60acd31e6dff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.227323] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea57e49d-aef1-4f9c-8daa-1508b64ceaa5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.235810] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16ae27c-d1ea-4c19-a9cf-256972915487 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.252607] env[62503]: DEBUG nova.compute.provider_tree [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.537493] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.537795] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.538015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.538218] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.538388] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.540965] env[62503]: INFO nova.compute.manager [None 
req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Terminating instance [ 925.542880] env[62503]: DEBUG nova.compute.manager [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 925.543118] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.543934] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927b173e-9ee5-41b0-98d3-7bc9f921a695 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.551930] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.552182] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0ca7d06-f9e2-4b1d-8637-dcd9558e1311 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.558172] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 925.558172] env[62503]: value = "task-1387949" [ 925.558172] env[62503]: _type = "Task" [ 925.558172] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.566238] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387949, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.700934] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.755776] env[62503]: DEBUG nova.scheduler.client.report [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 925.860257] env[62503]: DEBUG nova.network.neutron [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.072994] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387949, 'name': PowerOffVM_Task, 'duration_secs': 0.164681} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.073457] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.073740] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.074552] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbb2e92a-36fc-4bce-a55c-431622b0ee0e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.138500] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.138580] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.138897] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Deleting the datastore file [datastore2] c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.139193] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca41073b-d8c3-46b8-ad15-90f5323ff700 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.146039] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for the task: (returnval){ [ 926.146039] env[62503]: value = "task-1387951" [ 926.146039] env[62503]: _type = "Task" [ 926.146039] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.153685] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387951, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.245124] env[62503]: DEBUG nova.compute.manager [req-130ace08-59e3-408e-ba65-1beb3899d78f req-259013b8-c752-46df-8d81-aeb9a00c7cd5 service nova] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Received event network-vif-deleted-a99ea24a-aa97-4bea-9fa6-904a83e3aaa0 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 926.264054] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.264054] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 926.266343] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.757s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.270119] env[62503]: DEBUG nova.objects.instance [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'resources' on Instance uuid e7556915-634f-40d6-9e7f-da1c3201d8e4 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.363014] env[62503]: INFO nova.compute.manager [-] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Took 1.33 seconds to deallocate network for instance. [ 926.656894] env[62503]: DEBUG oslo_vmware.api [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Task: {'id': task-1387951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126667} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.657311] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.657515] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.657695] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.657870] env[62503]: INFO nova.compute.manager [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 926.658127] env[62503]: DEBUG oslo.service.loopingcall [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.658683] env[62503]: DEBUG nova.compute.manager [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 926.658800] env[62503]: DEBUG nova.network.neutron [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.728072] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.728386] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.728624] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "c9129f68-c755-4b78-b067-b77b01048c02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.728849] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.729100] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.731466] env[62503]: INFO nova.compute.manager [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Terminating instance [ 926.733373] env[62503]: DEBUG nova.compute.manager [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 926.733595] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.734449] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb891a6-ac43-4b4b-8f32-8ecaf4235fdf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.742571] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 926.742858] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c90740d-1a15-4945-8019-d140ca2ee8ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.749599] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 926.749599] env[62503]: value = "task-1387952" [ 926.749599] env[62503]: _type = "Task" [ 926.749599] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.760950] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.771965] env[62503]: DEBUG nova.compute.utils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.774598] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 926.774871] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.828330] env[62503]: DEBUG nova.policy [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ace91590bb7e414698ae6266d88c5984', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d53e6317ef2436096ad618f28f0c2b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 926.871413] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.983072] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77091e2-e2a5-4fcd-b6ed-5c3da458ef99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.990683] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58bbeb4-620e-4920-9a6e-65229c5eb8f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.020051] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c673c037-94e8-4743-8db0-d6dbe9ee33b6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.027582] env[62503]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35672dd5-66af-46cf-8c0e-023a06a26cfd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.040756] env[62503]: DEBUG nova.compute.provider_tree [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.094275] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Successfully created port: ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.234972] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.235209] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.259545] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387952, 'name': PowerOffVM_Task, 'duration_secs': 0.19411} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.259804] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.259978] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.260360] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b896dc0c-94f5-4980-b01e-9a0c95fe80d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.272189] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.272422] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.280906] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 927.346750] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.346990] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.347191] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleting the datastore file [datastore2] c9129f68-c755-4b78-b067-b77b01048c02 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.347743] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fc3457b-7f49-410c-b6a9-3bf8ee227440 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.354309] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 927.354309] env[62503]: value = "task-1387954" [ 927.354309] env[62503]: _type = "Task" [ 927.354309] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.361871] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.406725] env[62503]: DEBUG nova.network.neutron [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.545977] env[62503]: DEBUG nova.scheduler.client.report [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 927.737322] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 927.775776] env[62503]: DEBUG nova.compute.utils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 927.864428] env[62503]: DEBUG oslo_vmware.api [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1387954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134867} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.864705] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.864888] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.865037] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.865220] env[62503]: INFO nova.compute.manager [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Took 1.13 seconds to destroy the instance on the hypervisor. [ 927.865465] env[62503]: DEBUG oslo.service.loopingcall [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.865658] env[62503]: DEBUG nova.compute.manager [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 927.865750] env[62503]: DEBUG nova.network.neutron [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.911694] env[62503]: INFO nova.compute.manager [-] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Took 1.25 seconds to deallocate network for instance. 
[ 928.051720] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.785s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.054727] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.782s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.056413] env[62503]: INFO nova.compute.claims [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.075885] env[62503]: INFO nova.scheduler.client.report [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted allocations for instance e7556915-634f-40d6-9e7f-da1c3201d8e4 [ 928.255737] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.271456] env[62503]: DEBUG nova.compute.manager [req-56fa200b-8c7f-4f4a-a9bf-0d8ba163bbd9 req-940e8522-6e7e-4a6e-9de9-0b8a98dfc0d7 service nova] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Received event network-vif-deleted-21de8ca1-8ec9-4ca1-8a56-68c6d367064e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 928.271667] env[62503]: DEBUG nova.compute.manager [req-56fa200b-8c7f-4f4a-a9bf-0d8ba163bbd9 req-940e8522-6e7e-4a6e-9de9-0b8a98dfc0d7 service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Received event network-vif-deleted-792806f8-f6ea-4abd-9085-2a2ce83df26b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 928.271880] env[62503]: INFO nova.compute.manager [req-56fa200b-8c7f-4f4a-a9bf-0d8ba163bbd9 req-940e8522-6e7e-4a6e-9de9-0b8a98dfc0d7 service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Neutron deleted interface 792806f8-f6ea-4abd-9085-2a2ce83df26b; detaching it from the instance and deleting it from the info cache [ 928.272155] env[62503]: DEBUG nova.network.neutron [req-56fa200b-8c7f-4f4a-a9bf-0d8ba163bbd9 req-940e8522-6e7e-4a6e-9de9-0b8a98dfc0d7 service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.278725] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.293633] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 928.318348] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.318662] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.318901] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.319179] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.319398] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.319623] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.319906] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.320159] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.320409] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.320648] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.320924] env[62503]: DEBUG nova.virt.hardware [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.322058] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae3e587-7c96-4191-8ada-2932a4bcd148 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.329892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1e4b51-ceb0-4fbe-81d0-43198abde647 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.417538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.504986] env[62503]: DEBUG nova.compute.manager [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Received event network-vif-plugged-ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 928.505232] env[62503]: DEBUG oslo_concurrency.lockutils [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] Acquiring lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.505449] env[62503]: DEBUG oslo_concurrency.lockutils [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.505633] env[62503]: DEBUG oslo_concurrency.lockutils [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.506020] env[62503]: DEBUG nova.compute.manager [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] No waiting events found dispatching network-vif-plugged-ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.506475] env[62503]: WARNING nova.compute.manager [req-012830f3-1293-4e4e-ac18-d13aafbcf219 req-c1987d2b-1f63-45c0-b0fd-49b56ee92da4 service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Received unexpected event network-vif-plugged-ab44a375-9f88-4434-9a19-932e1ea594f1 for instance with vm_state building and task_state spawning. [ 928.563899] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Successfully updated port: ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.583628] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4dba9f2f-5a4d-4912-afbf-c0546063fc7e tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "e7556915-634f-40d6-9e7f-da1c3201d8e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.632s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.594290] env[62503]: DEBUG nova.network.neutron [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.775564] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2b8a66e-e961-49f5-bcd9-c0ef038b8d69 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.785577] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73908dd-beae-4a62-8675-7aaacf849637 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.810068] env[62503]: DEBUG nova.compute.manager [req-56fa200b-8c7f-4f4a-a9bf-0d8ba163bbd9 req-940e8522-6e7e-4a6e-9de9-0b8a98dfc0d7 service nova] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Detach interface failed, port_id=792806f8-f6ea-4abd-9085-2a2ce83df26b, reason: Instance c9129f68-c755-4b78-b067-b77b01048c02 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 929.066577] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.066774] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquired lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.067046] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.090041] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.090323] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.090560] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.090807] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.091024] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.093664] env[62503]: INFO nova.compute.manager [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Terminating instance [ 929.095149] env[62503]: DEBUG nova.compute.manager [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 929.095352] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.096192] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124599f0-91c9-4b58-a966-cf343fdfbaf1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.101111] env[62503]: INFO nova.compute.manager [-] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Took 1.24 seconds to deallocate network for instance. [ 929.110030] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.110030] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2781a462-ac77-4845-9808-1410779a16b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.118766] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 929.118766] env[62503]: value = "task-1387955" [ 929.118766] env[62503]: _type = "Task" [ 929.118766] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.128650] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387955, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.250318] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f20541-c6f8-45d9-90c6-3246174b482d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.260226] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cc5911-42a9-41de-b84c-c2fd7d6cd3e5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.299547] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6e7e01-13de-460e-bb05-8767b57b3f7b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.307759] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617eeeea-c05d-4f98-85dd-544f4e9b0f0c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.320593] env[62503]: DEBUG nova.compute.provider_tree [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.345556] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.345793] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.346046] env[62503]: INFO nova.compute.manager [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Attaching volume 628fd04b-dba0-424f-9dcd-1fc8f4074db1 to /dev/sdb [ 929.375185] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7400b3d-d254-434b-baa6-cf3677d4e39e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.381959] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993b76f0-6b5d-4a83-aa08-42212cc45acc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.394952] env[62503]: DEBUG nova.virt.block_device [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 
e693bcc2-3883-466d-913c-831146ca81e7] Updating existing volume attachment record: b56b2401-1952-4ee0-b09d-a19b252cb7a2 {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 929.599236] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.610519] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.628342] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387955, 'name': PowerOffVM_Task, 'duration_secs': 0.187898} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.628613] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.628867] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.629239] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2fbb278-2924-46d2-9d24-84af6febbd9b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.691207] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.691422] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.691606] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleting the datastore file [datastore1] 7b8c670d-3f2a-431d-91da-4ced781e6e51 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.691892] env[62503]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a69d7640-a4df-4386-b248-fce3c8ceeebc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.698287] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 929.698287] env[62503]: value = "task-1387960" [ 929.698287] env[62503]: _type = "Task" [ 929.698287] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.707608] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.736684] env[62503]: DEBUG nova.network.neutron [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Updating instance_info_cache with network_info: [{"id": "ab44a375-9f88-4434-9a19-932e1ea594f1", "address": "fa:16:3e:54:28:f9", "network": {"id": "4bd39623-d9ea-484f-adff-da8c244f3cdb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1552841154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d53e6317ef2436096ad618f28f0c2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab44a375-9f", "ovs_interfaceid": "ab44a375-9f88-4434-9a19-932e1ea594f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.824650] env[62503]: DEBUG nova.scheduler.client.report [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 930.208063] env[62503]: DEBUG oslo_vmware.api [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 
tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151148} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.208316] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 930.208500] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 930.208680] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.208868] env[62503]: INFO nova.compute.manager [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Took 1.11 seconds to destroy the instance on the hypervisor. [ 930.209118] env[62503]: DEBUG oslo.service.loopingcall [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.209309] env[62503]: DEBUG nova.compute.manager [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 930.209411] env[62503]: DEBUG nova.network.neutron [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.240297] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Releasing lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.240726] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Instance network_info: |[{"id": "ab44a375-9f88-4434-9a19-932e1ea594f1", "address": "fa:16:3e:54:28:f9", "network": {"id": "4bd39623-d9ea-484f-adff-da8c244f3cdb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1552841154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d53e6317ef2436096ad618f28f0c2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab44a375-9f", "ovs_interfaceid": "ab44a375-9f88-4434-9a19-932e1ea594f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 930.241319] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:28:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c02dd284-ab80-451c-93eb-48c8360acb9c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab44a375-9f88-4434-9a19-932e1ea594f1', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.248774] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Creating folder: Project (4d53e6317ef2436096ad618f28f0c2b8). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.249996] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-335ad73f-5297-46ba-adc1-068b1a6ae6da {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.260126] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Created folder: Project (4d53e6317ef2436096ad618f28f0c2b8) in parent group-v294540. [ 930.260352] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Creating folder: Instances. Parent ref: group-v294613. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.260608] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc99e005-cedf-44d4-8de3-b9e500decb1b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.269770] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Created folder: Instances in parent group-v294613. [ 930.270020] env[62503]: DEBUG oslo.service.loopingcall [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.270218] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.270423] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20414128-4468-40d8-a0d2-ba8672844085 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.289673] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.289673] env[62503]: value = "task-1387963" [ 930.289673] env[62503]: _type = "Task" [ 930.289673] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.297702] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387963, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.328972] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.329535] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 930.332178] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.950s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.333963] env[62503]: INFO nova.compute.claims [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.533737] env[62503]: DEBUG nova.compute.manager [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Received event network-changed-ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 930.534103] env[62503]: DEBUG nova.compute.manager [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Refreshing instance network info cache due to event network-changed-ab44a375-9f88-4434-9a19-932e1ea594f1. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 930.534369] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] Acquiring lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.534523] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] Acquired lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.534707] env[62503]: DEBUG nova.network.neutron [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Refreshing network info cache for port ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.800208] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387963, 'name': CreateVM_Task, 'duration_secs': 0.311977} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.800387] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.801095] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.801267] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.801577] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 930.801836] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eedac2c9-0ec0-47f4-95b2-51993990cffd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.805833] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 930.805833] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52588e0f-3225-70d3-ad83-361d472e297c" [ 930.805833] env[62503]: _type = "Task" [ 930.805833] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.812682] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52588e0f-3225-70d3-ad83-361d472e297c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.837342] env[62503]: DEBUG nova.compute.utils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.841132] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 930.841132] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.883847] env[62503]: DEBUG nova.policy [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '691809da402d4a29b085cfe3b22306b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a833cd3315d0487cb3badd7b0d330a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 930.943254] env[62503]: DEBUG nova.network.neutron [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.262915] env[62503]: DEBUG nova.network.neutron [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Updated VIF entry in instance network info cache for port ab44a375-9f88-4434-9a19-932e1ea594f1. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.263366] env[62503]: DEBUG nova.network.neutron [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Updating instance_info_cache with network_info: [{"id": "ab44a375-9f88-4434-9a19-932e1ea594f1", "address": "fa:16:3e:54:28:f9", "network": {"id": "4bd39623-d9ea-484f-adff-da8c244f3cdb", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1552841154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d53e6317ef2436096ad618f28f0c2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c02dd284-ab80-451c-93eb-48c8360acb9c", "external-id": "nsx-vlan-transportzone-818", "segmentation_id": 818, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab44a375-9f", "ovs_interfaceid": "ab44a375-9f88-4434-9a19-932e1ea594f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.319645] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': 
session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52588e0f-3225-70d3-ad83-361d472e297c, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.319955] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.320222] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.320466] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.320618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.320832] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.321086] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ba4c2cf-deb0-4506-ab9d-1062220b9f77 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.329506] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.329700] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.330429] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aa2f540-62a1-49cd-b3be-1ca34b24fe7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.339762] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 931.339762] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b962-5975-f615-e8b5-2c6d219a7b75" [ 931.339762] env[62503]: _type = "Task" [ 931.339762] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.343453] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 931.356073] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b962-5975-f615-e8b5-2c6d219a7b75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.375785] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Successfully created port: 814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.447642] env[62503]: INFO nova.compute.manager [-] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Took 1.24 seconds to deallocate network for instance. 
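[editor's note] The entries above (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task) all follow the same oslo_vmware invoke-and-poll pattern: a vSphere SOAP method is invoked with an opID, the returned task moref is handed to wait_for_task(), and the API layer polls it until vCenter reports success or error — hence the repeating "Invoking ... with opID=..." / "Waiting for the task" / "Task ... completed successfully" triplets. A minimal sketch of that pattern follows; the vCenter endpoint, credentials, and moref value are placeholders for illustration, not values taken from this log or from nova.conf.

    # Sketch of the oslo_vmware invoke/poll cycle seen throughout this trace.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; Nova reads these from [vmware] in nova.conf.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a VM (the moref value here is hypothetical).
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Methods ending in "_Task" return a task moref; wait_for_task() polls it
    # until vCenter marks the task as completed or raises on error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The later CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, and PowerOnVM_Task entries in this trace are further iterations of the same cycle, just with different SOAP methods and arguments.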
[ 931.548858] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12496627-986c-40b2-b65b-c11a2293203c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.556761] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd653d13-403c-4fa9-b8f0-dd8a234d2dc4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.586354] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd8cfc3-6a97-4223-b991-16dd2e2ac237 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.593626] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16df4dc-6bf6-48d2-874d-0b5d5d8a5a94 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.606572] env[62503]: DEBUG nova.compute.provider_tree [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.766614] env[62503]: DEBUG oslo_concurrency.lockutils [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] Releasing lock "refresh_cache-35bd28b5-101e-429f-8487-fbe5bf3528fb" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.766970] env[62503]: DEBUG nova.compute.manager [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Received event network-vif-deleted-e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 931.767230] env[62503]: INFO nova.compute.manager [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Neutron deleted interface e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e; detaching it from the instance and deleting it from the info cache [ 931.767467] env[62503]: DEBUG nova.network.neutron [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.851197] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5271b962-5975-f615-e8b5-2c6d219a7b75, 'name': SearchDatastore_Task, 'duration_secs': 0.02727} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.851950] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a57ea3fc-2735-44b3-8fde-6d31c570f28a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.859551] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 931.859551] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c78b36-478c-f483-0bf8-76f8e0e6cfe8" [ 931.859551] env[62503]: _type = "Task" [ 931.859551] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.867333] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c78b36-478c-f483-0bf8-76f8e0e6cfe8, 'name': SearchDatastore_Task, 'duration_secs': 0.007904} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.867563] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.867812] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 35bd28b5-101e-429f-8487-fbe5bf3528fb/35bd28b5-101e-429f-8487-fbe5bf3528fb.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.868052] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7051877-c15b-4cc9-a39e-81eceada9cb0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.873475] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 931.873475] env[62503]: value = "task-1387965" [ 931.873475] env[62503]: _type = "Task" [ 931.873475] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.880557] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387965, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.954681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.110079] env[62503]: DEBUG nova.scheduler.client.report [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 932.270021] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bacb7e57-958f-4d06-ad9b-7aeed3e36b9d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.280077] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31176aee-5bc5-4b01-b500-4fbe15cf4d32 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.306958] env[62503]: DEBUG nova.compute.manager [req-aa1b81c8-598d-477c-95a8-b451ac7ce90c req-cd598901-af4d-4103-9b55-4bfea2b472ba service nova] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Detach interface failed, port_id=e75a1fc0-2c4a-4103-8539-b8a7d4f9b24e, reason: Instance 7b8c670d-3f2a-431d-91da-4ced781e6e51 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 932.361756] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 932.383861] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488629} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.384146] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 35bd28b5-101e-429f-8487-fbe5bf3528fb/35bd28b5-101e-429f-8487-fbe5bf3528fb.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.384881] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.384881] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95b4eb5f-ccdf-4e9e-87ac-0ad5e3b5fc60 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.389307] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.389545] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.389710] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.389897] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.390072] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.390241] env[62503]: DEBUG nova.virt.hardware [None 
req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.390515] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.390718] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.390941] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.391160] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.391353] env[62503]: DEBUG nova.virt.hardware [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.392322] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8000eff8-1d05-48d4-9441-3c0c382041dd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.396344] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 932.396344] env[62503]: value = "task-1387966" [ 932.396344] env[62503]: _type = "Task" [ 932.396344] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.402622] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c0798d-9f01-4014-aad1-8dce80b3caef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.408640] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387966, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.615420] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.615924] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 932.618453] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.449s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.619865] env[62503]: INFO nova.compute.claims [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.835841] env[62503]: DEBUG nova.compute.manager [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Received event network-vif-plugged-814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 932.836100] env[62503]: DEBUG oslo_concurrency.lockutils [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] Acquiring lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.836329] env[62503]: DEBUG oslo_concurrency.lockutils [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.836506] env[62503]: DEBUG oslo_concurrency.lockutils [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.836680] env[62503]: DEBUG nova.compute.manager [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] No waiting events found dispatching network-vif-plugged-814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 932.836853] env[62503]: WARNING nova.compute.manager [req-660840c8-9c25-4add-b4b9-b3c4d5b3f225 req-c9681086-42dc-4123-b2ce-4a15ec451e44 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Received unexpected event network-vif-plugged-814e714d-f9f3-47d3-a228-0f1fd7000eb1 for instance with vm_state building and task_state spawning. [ 932.906223] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059769} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.906519] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.907242] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba377cb3-c444-4a2b-a2d5-b1688721c4fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.928649] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 35bd28b5-101e-429f-8487-fbe5bf3528fb/35bd28b5-101e-429f-8487-fbe5bf3528fb.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.929528] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Successfully updated port: 814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.930622] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f80d1089-5b87-45c5-9b38-9b006180c663 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.950981] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 932.950981] env[62503]: value = "task-1387967" [ 932.950981] env[62503]: _type = "Task" [ 932.950981] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.958498] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387967, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.124032] env[62503]: DEBUG nova.compute.utils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 933.127037] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 933.127154] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 933.176874] env[62503]: DEBUG nova.policy [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b076e358f78e4874876f90d96fd612e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e818e5ee9dc24efa96747c9558514a15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 933.415536] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Successfully created port: 4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.446352] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.446508] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.446664] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.461311] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': 
task-1387967, 'name': ReconfigVM_Task, 'duration_secs': 0.284058} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.461589] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 35bd28b5-101e-429f-8487-fbe5bf3528fb/35bd28b5-101e-429f-8487-fbe5bf3528fb.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.462430] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f852493b-28ce-4621-b989-64b448b79870 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.468777] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 933.468777] env[62503]: value = "task-1387968" [ 933.468777] env[62503]: _type = "Task" [ 933.468777] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.476510] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387968, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.628086] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 933.818201] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6bf36c-abe0-4e56-8041-7fda608abb3f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.825950] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280162fe-ebf9-4cd8-a5f7-a568025906bc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.855491] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560ddfc7-d1fa-4bca-8af1-ee7205c71519 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.862340] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4982fc8-782c-423a-b12b-276421f6b84b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.875084] env[62503]: DEBUG nova.compute.provider_tree [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.937181] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Volume attach. 
Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 933.937399] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294612', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'name': 'volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e693bcc2-3883-466d-913c-831146ca81e7', 'attached_at': '', 'detached_at': '', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'serial': '628fd04b-dba0-424f-9dcd-1fc8f4074db1'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 933.938235] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006a00cf-9f68-42b8-9c24-cf8fe4b9725b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.959803] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07d219e-1a1a-45b1-8961-c91a3107719b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.984255] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1/volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.987308] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55f1af9d-ddb2-427e-ba98-01a30df24623 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.000226] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.007518] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387968, 'name': Rename_Task, 'duration_secs': 0.142965} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.008760] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.009089] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 934.009089] env[62503]: value = "task-1387969" [ 934.009089] env[62503]: _type = "Task" [ 934.009089] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.009287] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd24822a-c47c-449a-ac24-e4d046c60f18 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.018620] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387969, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.021846] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 934.021846] env[62503]: value = "task-1387970" [ 934.021846] env[62503]: _type = "Task" [ 934.021846] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.155464] env[62503]: DEBUG nova.network.neutron [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Updating instance_info_cache with network_info: [{"id": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "address": "fa:16:3e:3e:5c:6e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap814e714d-f9", "ovs_interfaceid": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.378543] env[62503]: DEBUG nova.scheduler.client.report [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 934.524031] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387969, 'name': ReconfigVM_Task, 'duration_secs': 0.362939} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.526321] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfigured VM instance instance-00000048 to attach disk [datastore2] volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1/volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.531229] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2082ba26-8b17-4d17-97ad-e4ed124010bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.548272] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387970, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.549594] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 934.549594] env[62503]: value = "task-1387971" [ 934.549594] env[62503]: _type = "Task" [ 934.549594] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.557155] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.642829] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 934.659366] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.660312] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Instance network_info: |[{"id": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "address": "fa:16:3e:3e:5c:6e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap814e714d-f9", "ovs_interfaceid": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 934.661324] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:5c:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec3f9e71-839a-429d-b211-d3dfc98ca4f6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '814e714d-f9f3-47d3-a228-0f1fd7000eb1', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.669114] env[62503]: DEBUG oslo.service.loopingcall [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.671289] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.671729] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bd2ea61-c2f9-455f-9a3e-cfa81758c043 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.687619] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.687859] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.688036] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.688233] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.688381] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.688529] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.688738] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.688918] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.689124] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.689295] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.689473] env[62503]: DEBUG nova.virt.hardware [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.690258] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e061bfcf-b66e-4044-86dd-710e0ee794db {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.698533] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db74f486-58cc-4688-babf-ea8f279ae854 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.701952] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.701952] env[62503]: value = "task-1387972" [ 934.701952] env[62503]: _type = "Task" [ 934.701952] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.716617] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387972, 'name': CreateVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.862165] env[62503]: DEBUG nova.compute.manager [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Received event network-changed-814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 934.862522] env[62503]: DEBUG nova.compute.manager [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Refreshing instance network info cache due to event network-changed-814e714d-f9f3-47d3-a228-0f1fd7000eb1. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 934.862751] env[62503]: DEBUG oslo_concurrency.lockutils [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] Acquiring lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.862992] env[62503]: DEBUG oslo_concurrency.lockutils [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] Acquired lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.863441] env[62503]: DEBUG nova.network.neutron [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Refreshing network info cache for port 814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.883247] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.883866] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 934.886689] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.186s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.886974] env[62503]: DEBUG nova.objects.instance [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lazy-loading 'resources' on Instance uuid 9ccdc727-536e-4db8-bad4-960858254758 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.012495] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Successfully updated port: 4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.032573] env[62503]: DEBUG oslo_vmware.api [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387970, 'name': PowerOnVM_Task, 'duration_secs': 0.629609} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.032799] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.033017] env[62503]: INFO nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Took 6.74 seconds to spawn the instance on the hypervisor. [ 935.033565] env[62503]: DEBUG nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 935.034333] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1f8f82-c136-4846-8af7-6c2b532d1793 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.058364] env[62503]: DEBUG oslo_vmware.api [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1387971, 'name': ReconfigVM_Task, 'duration_secs': 0.142463} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.058661] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294612', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'name': 'volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e693bcc2-3883-466d-913c-831146ca81e7', 'attached_at': '', 'detached_at': '', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'serial': '628fd04b-dba0-424f-9dcd-1fc8f4074db1'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 935.213867] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387972, 'name': CreateVM_Task, 'duration_secs': 0.306702} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.214044] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.214672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.214846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.215199] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.215446] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29422a3d-8f2b-4063-98ab-2dad00a60e35 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.219793] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 935.219793] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5245db8d-e68e-230a-609b-16810a6e513c" [ 935.219793] env[62503]: _type = "Task" [ 935.219793] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.226961] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5245db8d-e68e-230a-609b-16810a6e513c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.393402] env[62503]: DEBUG nova.compute.utils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 935.397310] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 935.397605] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.435793] env[62503]: DEBUG nova.policy [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 935.515107] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.515255] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.515402] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.551974] env[62503]: INFO nova.compute.manager [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Took 13.89 seconds to build instance. [ 935.558798] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5615c254-b017-4095-b78f-990d9c5fcaba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.569844] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87e7dd5-0588-4ec2-95e7-ae02a524ab3d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.573609] env[62503]: DEBUG nova.network.neutron [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Updated VIF entry in instance network info cache for port 814e714d-f9f3-47d3-a228-0f1fd7000eb1. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.573936] env[62503]: DEBUG nova.network.neutron [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Updating instance_info_cache with network_info: [{"id": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "address": "fa:16:3e:3e:5c:6e", "network": {"id": "9b07889f-f8fb-4eed-a587-1e31b05ce03a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1446403066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a833cd3315d0487cb3badd7b0d330a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap814e714d-f9", "ovs_interfaceid": "814e714d-f9f3-47d3-a228-0f1fd7000eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.605902] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7456dda4-808f-4e57-a880-22e6c3539a2a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.614046] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc61f77-caa8-49d6-af5d-ceab6f0d0602 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.627839] env[62503]: DEBUG nova.compute.provider_tree [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.732327] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5245db8d-e68e-230a-609b-16810a6e513c, 'name': SearchDatastore_Task, 'duration_secs': 0.00925} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.732327] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.732327] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.732327] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.732327] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.732327] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.732327] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-744e8ed6-a22c-4cb4-b860-ef1181cc1622 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.739623] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Successfully created port: 41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.742453] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.742634] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.743348] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4bff2f9-da9b-47fc-b3af-9b5b47faf264 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.749494] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 935.749494] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52954945-dcc9-5272-ab94-e0ce84b7c052" [ 935.749494] env[62503]: _type = "Task" [ 935.749494] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.756632] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52954945-dcc9-5272-ab94-e0ce84b7c052, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.900299] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 936.052348] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.054611] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f609ecce-b0c9-40f4-a25e-33556aca01b7 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.403s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.076737] env[62503]: DEBUG oslo_concurrency.lockutils [req-0a798296-c2d8-4279-a8fa-76b3fdd883cf req-e6b6e662-bd3a-4e15-a944-ec757f1f1b04 service nova] Releasing lock "refresh_cache-09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.106344] env[62503]: DEBUG nova.objects.instance [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid e693bcc2-3883-466d-913c-831146ca81e7 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.130477] env[62503]: DEBUG nova.scheduler.client.report [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 936.184471] env[62503]: DEBUG nova.network.neutron [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Updating instance_info_cache with network_info: [{"id": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "address": "fa:16:3e:51:47:fd", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be6b321-11", "ovs_interfaceid": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 936.259081] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52954945-dcc9-5272-ab94-e0ce84b7c052, 'name': SearchDatastore_Task, 'duration_secs': 0.008587} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.259914] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f39216d-1f13-4bbb-a424-8715b8de0bc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.266131] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 936.266131] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d313c7-6bc4-69ea-6136-6c6ac8b7dc4d" [ 936.266131] env[62503]: _type = "Task" [ 936.266131] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.274319] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d313c7-6bc4-69ea-6136-6c6ac8b7dc4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.385814] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.385968] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.386189] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.386430] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.386560] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.389742] env[62503]: INFO nova.compute.manager [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Terminating instance [ 936.390616] env[62503]: DEBUG nova.compute.manager [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 936.390991] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.391812] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5682b3f9-aee2-4f2a-9812-305eb1ea42d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.399213] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.399446] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ad72c99-2edc-49b9-bb15-cb873a57acff {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.410019] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 936.410019] env[62503]: value = "task-1387973" [ 936.410019] env[62503]: _type = "Task" [ 936.410019] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.417523] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387973, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.611018] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4af1549b-3fbf-4f0a-aa84-7c2e2f786268 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.265s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.634973] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.637253] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.766s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.637495] env[62503]: DEBUG nova.objects.instance [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid 04054a79-70b6-409a-981f-6bf99fc3b4fc {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.652791] env[62503]: INFO nova.scheduler.client.report [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted allocations for instance 9ccdc727-536e-4db8-bad4-960858254758 [ 936.687271] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.687661] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance network_info: |[{"id": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "address": "fa:16:3e:51:47:fd", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap4be6b321-11", "ovs_interfaceid": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 936.688123] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:47:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4be6b321-1129-4f1b-9ca4-aa83bc5b715d', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.695648] env[62503]: DEBUG oslo.service.loopingcall [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.695892] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.696467] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-526972f4-97a6-442c-b408-03893479267c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.717632] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.717632] env[62503]: value = "task-1387974" [ 936.717632] env[62503]: _type = "Task" [ 936.717632] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.724889] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387974, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.777720] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d313c7-6bc4-69ea-6136-6c6ac8b7dc4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010595} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.777720] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.777720] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9/09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.778394] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45b390ff-e815-4e3b-b051-8f065f2ea884 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.789021] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 936.789021] env[62503]: value = "task-1387975" [ 936.789021] env[62503]: _type = "Task" [ 936.789021] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.796294] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387975, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.909038] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 936.921620] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387973, 'name': PowerOffVM_Task, 'duration_secs': 0.18466} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.922131] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.922269] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.925008] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afadc6bf-4c25-4387-8010-be28233d207a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.934655] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.934942] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.935972] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.935972] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.935972] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.935972] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 
tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.936226] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.936475] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.936977] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.937136] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.937289] env[62503]: DEBUG nova.virt.hardware [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.938264] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d4b739-39f9-46cd-889a-f5a3e724cef2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.947898] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09f16e9-5468-4476-833d-3bf00e39a174 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.965890] env[62503]: DEBUG nova.compute.manager [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Received event network-vif-plugged-4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 936.966153] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.966351] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.966545] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.966684] env[62503]: DEBUG nova.compute.manager [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] No waiting events found dispatching network-vif-plugged-4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 936.966850] env[62503]: WARNING nova.compute.manager [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Received unexpected event network-vif-plugged-4be6b321-1129-4f1b-9ca4-aa83bc5b715d for instance with vm_state building and task_state spawning. [ 936.967040] env[62503]: DEBUG nova.compute.manager [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Received event network-changed-4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 936.967180] env[62503]: DEBUG nova.compute.manager [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Refreshing instance network info cache due to event network-changed-4be6b321-1129-4f1b-9ca4-aa83bc5b715d. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 936.967386] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Acquiring lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.967517] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Acquired lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.967674] env[62503]: DEBUG nova.network.neutron [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Refreshing network info cache for port 4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.996898] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.997287] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.997630] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Deleting the datastore file [datastore2] 35bd28b5-101e-429f-8487-fbe5bf3528fb {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.998131] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf108e4e-373f-499c-bab6-03ab6c7d3381 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.005399] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for the task: (returnval){ [ 937.005399] env[62503]: value = "task-1387977" [ 937.005399] env[62503]: _type = "Task" [ 937.005399] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.014275] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.161432] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f7331f54-a763-48c7-a6c8-0aa9c4ad457f tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "9ccdc727-536e-4db8-bad4-960858254758" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.404s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.233427] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387974, 'name': CreateVM_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.295881] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387975, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.340876] env[62503]: DEBUG nova.compute.manager [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Received event network-vif-plugged-41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 937.342185] env[62503]: DEBUG oslo_concurrency.lockutils [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] Acquiring lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.342185] env[62503]: DEBUG oslo_concurrency.lockutils [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.342185] env[62503]: DEBUG oslo_concurrency.lockutils [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.342185] env[62503]: DEBUG nova.compute.manager [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] No waiting events found dispatching network-vif-plugged-41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 937.342185] env[62503]: WARNING nova.compute.manager [req-67789361-f0e1-4967-84a7-a9001c89d38b req-4bb376aa-9d72-4544-b837-b4c821c124af service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Received unexpected event network-vif-plugged-41418625-ee00-4672-80fc-df777f1e7301 for instance with vm_state building and task_state spawning. 
[ 937.350333] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494dd340-7e68-4447-8516-e2cb7dff903a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.358418] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d61a6df-1fe0-4d1b-8195-24357f324b3a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.394876] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257bac8e-e539-45ac-8824-ffb185502449 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.400138] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad0fc9d-bb73-4b35-80e6-6b3d107e8ef9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.416387] env[62503]: DEBUG nova.compute.provider_tree [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.418339] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Successfully updated port: 41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.515076] env[62503]: DEBUG oslo_vmware.api [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Task: {'id': task-1387977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240275} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.515346] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.515687] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.515785] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.515890] env[62503]: INFO nova.compute.manager [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Took 1.13 seconds to destroy the instance on the hypervisor. [ 937.516160] env[62503]: DEBUG oslo.service.loopingcall [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.516360] env[62503]: DEBUG nova.compute.manager [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 937.516769] env[62503]: DEBUG nova.network.neutron [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.667892] env[62503]: DEBUG nova.network.neutron [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Updated VIF entry in instance network info cache for port 4be6b321-1129-4f1b-9ca4-aa83bc5b715d. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.668336] env[62503]: DEBUG nova.network.neutron [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Updating instance_info_cache with network_info: [{"id": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "address": "fa:16:3e:51:47:fd", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be6b321-11", "ovs_interfaceid": "4be6b321-1129-4f1b-9ca4-aa83bc5b715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.728597] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387974, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.797166] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511371} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.797506] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9/09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 937.797736] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.797991] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-335563a5-3f89-4225-9732-971e2a89445e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.803774] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 937.803774] env[62503]: value = "task-1387978" [ 937.803774] env[62503]: _type = "Task" [ 937.803774] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.815022] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387978, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.911117] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.911414] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.920962] env[62503]: DEBUG nova.scheduler.client.report [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 937.923935] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.924091] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.924236] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.171496] env[62503]: DEBUG oslo_concurrency.lockutils [req-3dfcf80f-9bdc-4bec-95fa-d2fb5e0df96e req-f0548204-e619-491b-a666-c9e214dde51a service nova] Releasing lock "refresh_cache-5ba614a3-17be-4069-8219-f88f4d27aab9" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.229825] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387974, 'name': CreateVM_Task, 'duration_secs': 1.025254} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.230009] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.230840] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.231062] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.231399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.231673] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a97d0058-7df1-499e-97d0-c73ecad5c298 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.236474] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 938.236474] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ef06d7-3d7d-7af7-9eda-cd75c8ff29d7" [ 938.236474] env[62503]: _type = "Task" [ 938.236474] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.244009] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ef06d7-3d7d-7af7-9eda-cd75c8ff29d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.263484] env[62503]: DEBUG nova.network.neutron [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.313628] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109412} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.313914] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.314711] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2585be97-a020-4286-94b1-432a4635c3be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.337082] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9/09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.337256] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8482eed-f6cb-4c63-87bf-d20550a5285e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.357255] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 938.357255] env[62503]: value = "task-1387979" [ 938.357255] env[62503]: _type = "Task" [ 938.357255] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.365468] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.415039] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 938.426863] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.790s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.431298] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.175s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.433252] env[62503]: INFO nova.compute.claims [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 938.454673] env[62503]: INFO nova.scheduler.client.report [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance 04054a79-70b6-409a-981f-6bf99fc3b4fc [ 938.477972] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.616138] env[62503]: DEBUG nova.network.neutron [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Updating instance_info_cache with network_info: [{"id": "41418625-ee00-4672-80fc-df777f1e7301", "address": "fa:16:3e:04:8d:27", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41418625-ee", "ovs_interfaceid": "41418625-ee00-4672-80fc-df777f1e7301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.748739] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ef06d7-3d7d-7af7-9eda-cd75c8ff29d7, 'name': SearchDatastore_Task, 'duration_secs': 0.019646} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.749193] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.749470] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.749758] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.750038] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.750360] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.750650] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23aff4c4-fc71-42df-8a18-9f57aa9021e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.758439] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.758665] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.759771] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f49baf5-9a58-4c7b-8430-c77a01eca083 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.764947] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 938.764947] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520119ab-82d3-066b-bbe8-9442b07d7088" [ 938.764947] env[62503]: _type = "Task" [ 938.764947] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.765611] env[62503]: INFO nova.compute.manager [-] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Took 1.25 seconds to deallocate network for instance. [ 938.776785] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520119ab-82d3-066b-bbe8-9442b07d7088, 'name': SearchDatastore_Task, 'duration_secs': 0.00712} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.777804] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ce0eede-29dc-4385-8b26-5b6eb6090195 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.782629] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 938.782629] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b5f23b-aeb4-b938-3c5b-0546a46210c1" [ 938.782629] env[62503]: _type = "Task" [ 938.782629] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.790134] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b5f23b-aeb4-b938-3c5b-0546a46210c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.866147] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387979, 'name': ReconfigVM_Task, 'duration_secs': 0.317134} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.866376] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9/09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.866985] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be0efa81-1a14-4e9c-a13c-cff326daf621 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.872851] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 938.872851] env[62503]: value = "task-1387980" [ 938.872851] env[62503]: _type = "Task" [ 938.872851] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.879998] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387980, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.934592] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.963093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-eadec515-4bb7-4564-9192-b80cbbecb99d tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "04054a79-70b6-409a-981f-6bf99fc3b4fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.557s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.993622] env[62503]: DEBUG nova.compute.manager [req-92a16ae6-9fb8-4cde-b555-bb95a10ff173 req-d7988cf2-8c49-4b7b-9b4d-f9c14be49d5a service nova] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Received event network-vif-deleted-ab44a375-9f88-4434-9a19-932e1ea594f1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 939.118744] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.119170] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Instance network_info: |[{"id": 
"41418625-ee00-4672-80fc-df777f1e7301", "address": "fa:16:3e:04:8d:27", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41418625-ee", "ovs_interfaceid": "41418625-ee00-4672-80fc-df777f1e7301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 939.119647] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:8d:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41418625-ee00-4672-80fc-df777f1e7301', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.128650] env[62503]: DEBUG oslo.service.loopingcall [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.129329] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.129914] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58c2f266-38e0-49e5-af5b-62d3b693913e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.150873] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.150873] env[62503]: value = "task-1387981" [ 939.150873] env[62503]: _type = "Task" [ 939.150873] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.158453] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387981, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.279231] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.292607] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b5f23b-aeb4-b938-3c5b-0546a46210c1, 'name': SearchDatastore_Task, 'duration_secs': 0.007474} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.292876] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.293150] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.293410] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-addcd6c7-ae03-4fce-a8c1-9ea57c77f76d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.300669] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 939.300669] env[62503]: value = "task-1387982" [ 939.300669] env[62503]: _type = "Task" [ 939.300669] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.309045] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.386024] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387980, 'name': Rename_Task, 'duration_secs': 0.143151} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.386024] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 939.386024] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4426c0a-a783-4293-8fe8-d074017d561c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.393571] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 939.393571] env[62503]: value = "task-1387983" [ 939.393571] env[62503]: _type = "Task" [ 939.393571] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.404465] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387983, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.432846] env[62503]: DEBUG nova.compute.manager [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Received event network-changed-41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 939.433139] env[62503]: DEBUG nova.compute.manager [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Refreshing instance network info cache due to event network-changed-41418625-ee00-4672-80fc-df777f1e7301. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 939.433421] env[62503]: DEBUG oslo_concurrency.lockutils [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] Acquiring lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.433628] env[62503]: DEBUG oslo_concurrency.lockutils [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] Acquired lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.434124] env[62503]: DEBUG nova.network.neutron [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Refreshing network info cache for port 41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.620855] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2774fb-37b7-491b-9c8c-ac5f2775b855 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.628381] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7af616a-0803-4d16-b652-b4aacd9ab505 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.663340] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e224ed2-8a9d-4242-b1ba-1e2d56fd1715 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.669956] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387981, 'name': CreateVM_Task, 'duration_secs': 0.327781} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.672184] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.673011] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.673210] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.673564] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.674892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189226fb-a088-40f2-8ea6-23fab7b941e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.678876] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83bbb707-a226-445c-916b-68548788e6e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.690566] env[62503]: DEBUG nova.compute.provider_tree [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.695651] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 939.695651] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a07369-df75-3bec-1d7a-ef2449dc61a8" [ 939.695651] env[62503]: _type = "Task" [ 939.695651] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.701597] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a07369-df75-3bec-1d7a-ef2449dc61a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.811058] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446223} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.811058] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.811284] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.811389] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e469fecc-4607-4ef5-84a0-95dc3fabb528 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.817906] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 939.817906] env[62503]: value = "task-1387984" [ 939.817906] env[62503]: _type = "Task" [ 939.817906] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.825206] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387984, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.902859] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387983, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.100317] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.100542] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.131175] env[62503]: DEBUG nova.network.neutron [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Updated VIF entry in instance network info cache for port 41418625-ee00-4672-80fc-df777f1e7301. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.131450] env[62503]: DEBUG nova.network.neutron [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Updating instance_info_cache with network_info: [{"id": "41418625-ee00-4672-80fc-df777f1e7301", "address": "fa:16:3e:04:8d:27", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41418625-ee", "ovs_interfaceid": "41418625-ee00-4672-80fc-df777f1e7301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.195453] env[62503]: DEBUG nova.scheduler.client.report [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 940.209419] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a07369-df75-3bec-1d7a-ef2449dc61a8, 'name': SearchDatastore_Task, 'duration_secs': 0.048102} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.209706] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.209942] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.210364] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.210533] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.210720] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.211637] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f850fa4-c884-4768-8555-afed86ad881b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.222584] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.222868] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.223927] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4126edda-a281-4748-8a16-0faf13aa6a45 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.230106] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 940.230106] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52292813-30e5-87b5-ae47-8d0fb1e4696a" [ 940.230106] env[62503]: _type = "Task" [ 940.230106] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.238431] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52292813-30e5-87b5-ae47-8d0fb1e4696a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.327694] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062066} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.328169] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.328743] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053d66e-bf35-4d57-90d4-25caf10c602e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.350393] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.351027] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-317881a8-cbdd-4865-94f5-fec7c2cf3e33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.371456] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 940.371456] env[62503]: value = "task-1387985" [ 940.371456] env[62503]: _type = "Task" [ 940.371456] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.378974] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387985, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.402178] env[62503]: DEBUG oslo_vmware.api [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387983, 'name': PowerOnVM_Task, 'duration_secs': 0.986563} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.402427] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 940.402624] env[62503]: INFO nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Took 8.04 seconds to spawn the instance on the hypervisor. [ 940.402805] env[62503]: DEBUG nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 940.403544] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc21ced0-58ac-44f1-b844-faea3095501c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.603017] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 940.634295] env[62503]: DEBUG oslo_concurrency.lockutils [req-190b6463-7e61-4f0e-b39c-2cd9e3e91f0a req-9b9d2896-2d55-423a-97b3-a317723f3035 service nova] Releasing lock "refresh_cache-e0df0ce5-1e88-4a39-8911-529b235f5b88" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.704405] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.704949] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 940.707775] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.290s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.708035] env[62503]: DEBUG nova.objects.instance [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lazy-loading 'resources' on Instance uuid c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.739964] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52292813-30e5-87b5-ae47-8d0fb1e4696a, 'name': SearchDatastore_Task, 'duration_secs': 0.015958} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.741324] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3323f389-bb4e-4ae8-82dc-765e4cf21a00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.746527] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 940.746527] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520bdab3-63d4-7dda-4627-31b6497087f4" [ 940.746527] env[62503]: _type = "Task" [ 940.746527] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.753779] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520bdab3-63d4-7dda-4627-31b6497087f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.882094] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.921020] env[62503]: INFO nova.compute.manager [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Took 16.67 seconds to build instance. 
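The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above are produced by oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of the calling pattern, assuming a reachable vCenter; the host, credentials, helper name and VM managed-object reference below are placeholders, not values from this trace:

```python
# Illustrative sketch of the oslo.vmware task-wait pattern seen in the log above.
# The vCenter host, credentials and the VM managed-object reference are placeholders.
from oslo_vmware import api


def power_on_and_wait(vm_ref):
    """Start a VM and block until vCenter reports the task as finished."""
    session = api.VMwareAPISession(
        'vc.example.org', 'administrator', 'secret',
        10,    # api_retry_count: retries on transient session faults
        0.5)   # task_poll_interval: seconds between task polls

    # Asynchronous vSphere methods return a Task moref; invoke_api emits the
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." debug lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task drives _poll_task, which logs "progress is N%" until the
    # task has "completed successfully" (or raises on error), then returns its info.
    return session.wait_for_task(task)
```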
[ 941.128236] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.211850] env[62503]: DEBUG nova.compute.utils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 941.213156] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 941.213359] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 941.256312] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520bdab3-63d4-7dda-4627-31b6497087f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013777} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.258694] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.259034] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] e0df0ce5-1e88-4a39-8911-529b235f5b88/e0df0ce5-1e88-4a39-8911-529b235f5b88.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.259553] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de0ac35c-e4bb-44d2-9ad9-29bee513b71c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.263155] env[62503]: DEBUG nova.policy [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b072e4c8ef94b26895d59ede518aaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0849093c8b48400a8e9d56171ea99e8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 941.268727] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 941.268727] env[62503]: value = "task-1387986" [ 941.268727] env[62503]: _type = "Task" [ 941.268727] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.278739] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.382859] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387985, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.392816] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a419284f-6620-4067-80bd-b03303cf647c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.399562] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40efaa70-32ce-4f27-9ae7-1d49348b35e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.429291] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5d3efbe8-4cdd-4e3e-a6a1-a7d4876131b5 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.184s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.430208] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f1fe7c-ec1b-4c3a-8b9c-158e44d530f2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.437272] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fb1364-029d-4a78-9c1f-24d3c4ff25a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.450198] env[62503]: DEBUG nova.compute.provider_tree [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.526156] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Successfully created port: 89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.718368] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 941.781298] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387986, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.884452] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387985, 'name': ReconfigVM_Task, 'duration_secs': 1.176666} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.884837] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.885594] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f3d1ca1-58e2-446c-b611-6d23491f92fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.893326] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 941.893326] env[62503]: value = "task-1387987" [ 941.893326] env[62503]: _type = "Task" [ 941.893326] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.902255] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387987, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.953577] env[62503]: DEBUG nova.scheduler.client.report [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 942.281525] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687862} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.281752] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] e0df0ce5-1e88-4a39-8911-529b235f5b88/e0df0ce5-1e88-4a39-8911-529b235f5b88.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.281972] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.282247] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-404f477c-83fe-42a4-af98-0991e718656f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.288730] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 942.288730] env[62503]: value = "task-1387988" [ 942.288730] env[62503]: _type = "Task" [ 942.288730] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.296234] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.404351] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387987, 'name': Rename_Task, 'duration_secs': 0.149931} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.404649] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.404982] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60fe93ca-7193-4ffb-a1c9-3cf71f550007 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.411254] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 942.411254] env[62503]: value = "task-1387989" [ 942.411254] env[62503]: _type = "Task" [ 942.411254] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.419110] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.458897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.751s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.461287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.851s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.461488] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.463337] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.509s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.463726] env[62503]: DEBUG nova.objects.instance [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'resources' on Instance uuid 7b8c670d-3f2a-431d-91da-4ced781e6e51 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.484216] env[62503]: INFO nova.scheduler.client.report [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocations for instance c9129f68-c755-4b78-b067-b77b01048c02 [ 942.485865] env[62503]: INFO nova.scheduler.client.report [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Deleted allocations for instance c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7 [ 942.728033] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 942.749174] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.749467] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.749629] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.749820] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.749971] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.750223] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.750347] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.750556] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.750695] 
env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.750873] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.751167] env[62503]: DEBUG nova.virt.hardware [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.752033] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17146530-057a-4707-a183-6091981a4363 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.760390] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96581c3f-4e3c-4e17-8b53-7961b428a0ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.797340] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062012} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.797562] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.798328] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9003a689-68f7-4cb9-ac99-ce3018976581 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.820095] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] e0df0ce5-1e88-4a39-8911-529b235f5b88/e0df0ce5-1e88-4a39-8911-529b235f5b88.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.820205] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91f7b282-bbe4-48a7-a372-47ac18c7877e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.839977] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 942.839977] env[62503]: value = "task-1387990" [ 942.839977] env[62503]: _type = "Task" [ 942.839977] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.847716] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387990, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.923509] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387989, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.993621] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4a0ddf77-6ecd-4dd8-9f92-8098ccbc1ed1 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "c9129f68-c755-4b78-b067-b77b01048c02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.265s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.994624] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7d4fd531-7e25-4cab-99a2-9ab02a01b538 tempest-ServerTagsTestJSON-53821113 tempest-ServerTagsTestJSON-53821113-project-member] Lock "c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.457s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.111539] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.111912] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.112048] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 943.112501] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 943.128143] env[62503]: DEBUG nova.compute.manager [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Received event network-vif-plugged-89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 943.128397] env[62503]: DEBUG oslo_concurrency.lockutils [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] Acquiring lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.128631] env[62503]: DEBUG oslo_concurrency.lockutils [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.128896] env[62503]: DEBUG oslo_concurrency.lockutils [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.128984] env[62503]: DEBUG nova.compute.manager [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] No waiting events found dispatching network-vif-plugged-89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 943.129162] env[62503]: WARNING nova.compute.manager [req-da60b6ad-49d7-4cab-9874-bdfeb3833abb req-b151e2e6-313c-4d5f-8c44-eb3dbb24bb2b service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Received unexpected event network-vif-plugged-89edb450-124a-47e0-b611-2b5ffb36fc2b for instance with vm_state building and task_state spawning. [ 943.145981] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cbc351-ed62-4e68-b928-edf850893b29 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.153473] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac7a2e0-2bd6-4d2a-bd56-f6c363af9134 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.183771] env[62503]: DEBUG nova.compute.manager [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 943.184774] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f929ce-bb74-4a84-bcf9-d12e57d1b79c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.187778] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789215bd-424d-4079-a83d-5487fec89a51 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.198878] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2de0660-e1e0-44c6-899c-1deef2180bba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.211974] env[62503]: DEBUG nova.compute.provider_tree [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.277519] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Successfully updated port: 89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 943.349436] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387990, 'name': ReconfigVM_Task, 'duration_secs': 
0.296467} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.349719] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Reconfigured VM instance instance-00000056 to attach disk [datastore1] e0df0ce5-1e88-4a39-8911-529b235f5b88/e0df0ce5-1e88-4a39-8911-529b235f5b88.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.350377] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5de784ad-ad34-49c0-8d66-52fe620b0c09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.356660] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 943.356660] env[62503]: value = "task-1387991" [ 943.356660] env[62503]: _type = "Task" [ 943.356660] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.363964] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387991, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.421962] env[62503]: DEBUG oslo_vmware.api [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387989, 'name': PowerOnVM_Task, 'duration_secs': 0.710284} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.422315] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.422463] env[62503]: INFO nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Took 8.78 seconds to spawn the instance on the hypervisor. 
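The "Acquiring lock" / "Acquired lock" / "Releasing lock" messages and the 'acquired by "..." :: waited Ns' / '"released" by "..." :: held Ns' pairs that run through this trace come from oslo.concurrency's lockutils. A minimal sketch of the two call styles that emit them; the lock names and function bodies below are placeholders, not Nova's actual call sites:

```python
# Illustrative sketch of the oslo.concurrency locking patterns behind the
# acquire/release messages in this log. Lock names below are placeholders.
from oslo_concurrency import lockutils


def refresh_cache(instance_uuid, refresh):
    # Context-manager form: logs "Acquiring lock", "Acquired lock" and
    # "Releasing lock" (lockutils.lock()).
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh(instance_uuid)


# Decorator form: logs 'Lock "..." acquired by "..." :: waited Ns' and
# '"released" by "..." :: held Ns' (the inner() wrapper in lockutils).
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    pass  # resource-tracker work would happen while the lock is held
```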
[ 943.422709] env[62503]: DEBUG nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 943.423423] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcc2914-5c41-40ab-8d43-f78082c968d9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.620058] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 943.620233] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 943.620364] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Skipping network cache update for instance because it is Building. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10237}} [ 943.647557] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.647699] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.647845] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Forcefully refreshing network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 943.648013] env[62503]: DEBUG nova.objects.instance [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lazy-loading 'info_cache' on Instance uuid b6fddb0d-70f5-433f-a0ef-0d6bffb35579 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.699339] env[62503]: INFO nova.compute.manager [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] instance snapshotting [ 943.705017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013ed735-4f8e-4802-a3f4-85c20d333fa2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.722529] env[62503]: DEBUG nova.scheduler.client.report [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 943.726608] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9770c9-afbd-41c5-a3e6-cad99f924d20 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.781184] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.781797] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.782347] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.865991] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387991, 'name': Rename_Task, 'duration_secs': 0.1304} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.866294] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.866544] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-609f7c21-5236-4fe0-b9c7-3236f379ef27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.872655] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 943.872655] env[62503]: value = "task-1387992" [ 943.872655] env[62503]: _type = "Task" [ 943.872655] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.880472] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.942829] env[62503]: INFO nova.compute.manager [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Took 19.57 seconds to build instance. [ 944.232439] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.233248] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.299s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.235173] env[62503]: INFO nova.compute.claims [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.238400] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Creating Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 944.239045] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3ae37c42-a612-44ac-9fe0-0e8727a42e97 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.246879] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 944.246879] env[62503]: value = "task-1387993" [ 944.246879] env[62503]: _type = "Task" [ 944.246879] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.258661] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387993, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.259651] env[62503]: INFO nova.scheduler.client.report [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted allocations for instance 7b8c670d-3f2a-431d-91da-4ced781e6e51 [ 944.314573] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.318840] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.384662] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387992, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.448019] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7eeeb217-fdb2-41f4-98af-448aa496c9ae tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.084s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.448019] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.131s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.448019] env[62503]: DEBUG nova.compute.manager [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 944.448019] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51e605f-8793-4c9f-b93b-e04a4c8f89d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.455018] env[62503]: DEBUG nova.compute.manager [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62503) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3375}} [ 944.455018] env[62503]: DEBUG nova.objects.instance [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'flavor' on Instance uuid 5ba614a3-17be-4069-8219-f88f4d27aab9 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.456292] env[62503]: DEBUG nova.network.neutron [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Updating instance_info_cache with network_info: [{"id": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "address": "fa:16:3e:79:e1:eb", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89edb450-12", "ovs_interfaceid": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.757953] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387993, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.768281] env[62503]: DEBUG oslo_concurrency.lockutils [None req-753652d2-1501-4933-9a2e-a2a95b2bb77c tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "7b8c670d-3f2a-431d-91da-4ced781e6e51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.678s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.884226] env[62503]: DEBUG oslo_vmware.api [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1387992, 'name': PowerOnVM_Task, 'duration_secs': 0.826213} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.884329] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.884996] env[62503]: INFO nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Took 7.98 seconds to spawn the instance on the hypervisor. [ 944.884996] env[62503]: DEBUG nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 944.885636] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60a57a6-25a9-440f-b9e4-8006d3c830b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.961119] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.961724] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Instance network_info: |[{"id": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "address": "fa:16:3e:79:e1:eb", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89edb450-12", "ovs_interfaceid": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 944.963793] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 
68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:e1:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89edb450-124a-47e0-b611-2b5ffb36fc2b', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.973529] env[62503]: DEBUG oslo.service.loopingcall [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.974857] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.975137] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04dcf366-0e0e-436b-bca2-3711556080bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.998651] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.998651] env[62503]: value = "task-1387994" [ 944.998651] env[62503]: _type = "Task" [ 944.998651] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.009902] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387994, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.155078] env[62503]: DEBUG nova.compute.manager [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Received event network-changed-89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 945.155078] env[62503]: DEBUG nova.compute.manager [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Refreshing instance network info cache due to event network-changed-89edb450-124a-47e0-b611-2b5ffb36fc2b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 945.155446] env[62503]: DEBUG oslo_concurrency.lockutils [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] Acquiring lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.155828] env[62503]: DEBUG oslo_concurrency.lockutils [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] Acquired lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.155828] env[62503]: DEBUG nova.network.neutron [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Refreshing network info cache for port 89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.262230] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387993, 'name': CreateSnapshot_Task, 'duration_secs': 0.778922} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.262444] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Created Snapshot of the VM instance {{(pid=62503) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 945.263245] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c637c1f0-c475-4e28-8c76-0737b20ec789 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.401659] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10ad95e-a69d-435e-ae47-7e52099f49bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.405564] env[62503]: INFO nova.compute.manager [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Took 20.25 seconds to build instance. 
[ 945.409620] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af38227-f241-431f-af28-61d41d506cfa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.439625] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.441614] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c36fd40-ab2c-4264-9e42-5aaea7a5db0c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.449188] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d8c0a4-7ade-49ea-bd19-2424fba9635c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.462686] env[62503]: DEBUG nova.compute.provider_tree [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.474986] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.475441] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ed2de4e-fd2b-409e-b745-62a1c3ef01b8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.481200] env[62503]: DEBUG oslo_vmware.api [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 945.481200] env[62503]: value = "task-1387995" [ 945.481200] env[62503]: _type = "Task" [ 945.481200] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.488557] env[62503]: DEBUG oslo_vmware.api [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.496462] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.496822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.497143] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.497439] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.497702] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.504098] env[62503]: INFO nova.compute.manager [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Terminating instance [ 945.506810] env[62503]: DEBUG nova.compute.manager [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 
629054bb-8fdb-45a2-8c07-216c4104d4a6] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 945.507099] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.508512] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4b7f46-0065-4251-8bd4-d76613f8ea82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.516414] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1387994, 'name': CreateVM_Task, 'duration_secs': 0.317486} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.517012] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 945.517818] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.518073] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.518510] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.518874] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eec91cd-a238-4ed6-aa73-fd4f107081fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.523661] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.524371] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-483a5ead-ca5d-47f3-a829-4430d767d502 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.527600] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 
tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 945.527600] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c3c064-17fa-ba4c-a873-7619ff965143" [ 945.527600] env[62503]: _type = "Task" [ 945.527600] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.533622] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 945.533622] env[62503]: value = "task-1387996" [ 945.533622] env[62503]: _type = "Task" [ 945.533622] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.539641] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c3c064-17fa-ba4c-a873-7619ff965143, 'name': SearchDatastore_Task, 'duration_secs': 0.008708} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.540524] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.540706] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.540942] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.541158] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.541378] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.544131] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2f462dd-d03f-4fde-b2d2-a3ff51f85c27 
{{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.546060] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.552884] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.553083] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.553825] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75927cf6-8df9-40c8-bcd6-b26ba0529252 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.558839] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 945.558839] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52652c00-1a68-249b-57fd-dc226d5fe1cd" [ 945.558839] env[62503]: _type = "Task" [ 945.558839] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.566072] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52652c00-1a68-249b-57fd-dc226d5fe1cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.787262] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Creating linked-clone VM from snapshot {{(pid=62503) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 945.787262] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-26cc54ef-bac3-493a-a83f-efec776a212d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.795509] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 945.795509] env[62503]: value = "task-1387997" [ 945.795509] env[62503]: _type = "Task" [ 945.795509] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.809051] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.867858] env[62503]: DEBUG nova.network.neutron [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Updated VIF entry in instance network info cache for port 89edb450-124a-47e0-b611-2b5ffb36fc2b. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.868314] env[62503]: DEBUG nova.network.neutron [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Updating instance_info_cache with network_info: [{"id": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "address": "fa:16:3e:79:e1:eb", "network": {"id": "8feb2896-b13e-4845-9a81-881bf941f703", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1117860439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0849093c8b48400a8e9d56171ea99e8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89edb450-12", "ovs_interfaceid": "89edb450-124a-47e0-b611-2b5ffb36fc2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.907495] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9e86ee46-4d8c-4f28-84b2-a369260b22e3 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.756s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.945310] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.945525] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updated the network info_cache for instance {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10295}} [ 945.945734] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task 
ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.945991] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.946270] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.946476] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.946671] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.946888] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.946995] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 945.947159] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 945.965364] env[62503]: DEBUG nova.scheduler.client.report [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 945.991361] env[62503]: DEBUG oslo_vmware.api [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1387995, 'name': PowerOffVM_Task, 'duration_secs': 0.215171} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.991635] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.991834] env[62503]: DEBUG nova.compute.manager [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 945.992594] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9666a2f4-d28f-4d04-8ada-ea9fcd00daf4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.043588] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387996, 'name': PowerOffVM_Task, 'duration_secs': 0.164586} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.044111] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.044301] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.044550] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e41887e-6978-43ba-9375-5f0213d5adcf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.068769] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52652c00-1a68-249b-57fd-dc226d5fe1cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010767} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.069560] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43a5485b-e526-49c3-a7ed-0826d83e181b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.074916] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 946.074916] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52003e19-403a-30a5-4312-8ef6295c4d19" [ 946.074916] env[62503]: _type = "Task" [ 946.074916] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.082672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.082917] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.083105] env[62503]: DEBUG nova.compute.manager [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 946.083378] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52003e19-403a-30a5-4312-8ef6295c4d19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.084095] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adfbbb9-0518-4c8b-ad81-18ec3ed0561b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.089820] env[62503]: DEBUG nova.compute.manager [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3375}} [ 946.090653] env[62503]: DEBUG nova.objects.instance [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'flavor' on Instance uuid e0df0ce5-1e88-4a39-8911-529b235f5b88 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.111896] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.112164] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.112496] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleting the datastore file [datastore1] 629054bb-8fdb-45a2-8c07-216c4104d4a6 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.112619] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d021cbb3-9b04-4ccf-b42e-fca30caebc5a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.119265] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for the task: (returnval){ [ 946.119265] env[62503]: value = "task-1387999" [ 946.119265] env[62503]: _type = "Task" [ 946.119265] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.128576] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.305498] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 94%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.371194] env[62503]: DEBUG oslo_concurrency.lockutils [req-73b3775d-5afb-419d-bf20-ee72e48eaec5 req-0b1da619-cea5-4d07-a49e-0c110deccfc1 service nova] Releasing lock "refresh_cache-68f0c60d-ceff-4d7a-b81d-4845b4c5134c" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.450470] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.470785] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.471355] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 946.473949] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.195s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.474198] env[62503]: DEBUG nova.objects.instance [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lazy-loading 'resources' on Instance uuid 35bd28b5-101e-429f-8487-fbe5bf3528fb {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.502657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bcaa8809-86dd-48c3-a137-03edfa93ef8e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.585783] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52003e19-403a-30a5-4312-8ef6295c4d19, 'name': SearchDatastore_Task, 'duration_secs': 0.010533} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.586089] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.586371] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 68f0c60d-ceff-4d7a-b81d-4845b4c5134c/68f0c60d-ceff-4d7a-b81d-4845b4c5134c.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.586635] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f6d44c2-aea7-474d-80f9-c52ac3d0ee3c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.592788] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 946.592788] env[62503]: value = "task-1388000" [ 946.592788] env[62503]: _type = "Task" [ 946.592788] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.603264] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.628260] env[62503]: DEBUG oslo_vmware.api [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Task: {'id': task-1387999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174074} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.628510] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.628703] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.628987] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.629106] env[62503]: INFO nova.compute.manager [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 946.629316] env[62503]: DEBUG oslo.service.loopingcall [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.629515] env[62503]: DEBUG nova.compute.manager [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 946.629609] env[62503]: DEBUG nova.network.neutron [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.806961] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.882066] env[62503]: INFO nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Rebuilding instance [ 946.938086] env[62503]: DEBUG nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 946.939089] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146aaf23-06f5-4fae-9b5c-74c4ea23c6cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.976849] env[62503]: DEBUG nova.compute.utils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.981579] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 946.981817] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.033116] env[62503]: DEBUG nova.policy [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d1fa794892747598a9c0b50bfd82581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12a42517cf8f4ad3836f2f95e8833dd4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.100463] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.102017] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68a39941-3272-43a0-bcc3-679364e2d592 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.107797] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 
tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388000, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.113635] env[62503]: DEBUG oslo_vmware.api [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 947.113635] env[62503]: value = "task-1388001" [ 947.113635] env[62503]: _type = "Task" [ 947.113635] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.129755] env[62503]: DEBUG oslo_vmware.api [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388001, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.184318] env[62503]: DEBUG nova.compute.manager [req-a2bf60a2-6cb4-4d9b-b666-1153b0e417d8 req-2a2d6a35-01aa-4479-8b85-4b2c9e14977f service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Received event network-vif-deleted-269d53b9-4861-484d-bdac-553a7fafc310 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 947.184768] env[62503]: INFO nova.compute.manager [req-a2bf60a2-6cb4-4d9b-b666-1153b0e417d8 req-2a2d6a35-01aa-4479-8b85-4b2c9e14977f service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Neutron deleted interface 269d53b9-4861-484d-bdac-553a7fafc310; detaching it from the instance and deleting it from the info cache [ 947.185105] env[62503]: DEBUG nova.network.neutron [req-a2bf60a2-6cb4-4d9b-b666-1153b0e417d8 req-2a2d6a35-01aa-4479-8b85-4b2c9e14977f service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.191023] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8485be4-4161-4416-a0db-0a9825728760 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.196355] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618c7d18-d188-4561-b17f-cd851c81f587 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.233746] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c4590f-cdc1-4eaa-9bb1-4d20882e41b5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.241980] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039d5684-ea62-485a-b61f-4c7c5cf91bbb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.261135] env[62503]: DEBUG nova.compute.provider_tree [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.307572] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 95%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.342975] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Successfully created port: 228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.482604] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 947.604098] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536326} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.604386] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 68f0c60d-ceff-4d7a-b81d-4845b4c5134c/68f0c60d-ceff-4d7a-b81d-4845b4c5134c.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.604656] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.604930] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b868a56e-c104-4ae8-aa1d-2a5637cc5531 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.613497] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 947.613497] env[62503]: value = "task-1388002" [ 947.613497] env[62503]: _type = "Task" [ 947.613497] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.623652] env[62503]: DEBUG nova.network.neutron [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.624792] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.627812] env[62503]: DEBUG oslo_vmware.api [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388001, 'name': PowerOffVM_Task, 'duration_secs': 0.188001} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.628059] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.628261] env[62503]: DEBUG nova.compute.manager [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 947.629041] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae448c75-7137-48f2-8220-cf90caf0a522 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.691600] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7aca97e9-a6c0-4063-ad06-dd5957a99950 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.700147] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b765643-9f38-4718-b80e-12b94dba7acb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.726990] env[62503]: DEBUG nova.compute.manager [req-a2bf60a2-6cb4-4d9b-b666-1153b0e417d8 req-2a2d6a35-01aa-4479-8b85-4b2c9e14977f service nova] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Detach interface failed, port_id=269d53b9-4861-484d-bdac-553a7fafc310, reason: Instance 629054bb-8fdb-45a2-8c07-216c4104d4a6 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 947.764110] env[62503]: DEBUG nova.scheduler.client.report [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 947.808569] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.954307] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.954726] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a88d1b8d-ab1e-4a1d-8cfd-fe014d315f11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.961999] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 947.961999] env[62503]: value = "task-1388003" [ 947.961999] env[62503]: _type = "Task" [ 947.961999] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.969773] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.122416] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265461} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.122699] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.123483] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae9fe74-bbb1-4e46-9fae-b616ca1d785f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.126640] env[62503]: INFO nova.compute.manager [-] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Took 1.50 seconds to deallocate network for instance. [ 948.152200] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 68f0c60d-ceff-4d7a-b81d-4845b4c5134c/68f0c60d-ceff-4d7a-b81d-4845b4c5134c.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.152897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f8d2fb1f-9ea0-4c7f-b322-3fd5d1fad38c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.153828] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cca5909-2dd1-4841-9876-c6219339dd7f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.180446] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 948.180446] env[62503]: value = "task-1388004" [ 948.180446] env[62503]: _type = "Task" [ 948.180446] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.190630] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388004, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.269072] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.271563] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.143s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.273166] env[62503]: INFO nova.compute.claims [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.292123] env[62503]: INFO nova.scheduler.client.report [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Deleted allocations for instance 35bd28b5-101e-429f-8487-fbe5bf3528fb [ 948.310290] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.473099] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 948.473368] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.474140] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941a9910-1007-4f46-b6da-2f3eaf73679c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.481099] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.481356] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee624696-602d-4193-bbb4-e108ebe62ffd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.491435] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 948.516704] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 948.517019] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 948.517019] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.517119] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 948.517279] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.517432] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 948.517642] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 948.517805] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 948.517976] 
env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 948.518159] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 948.518340] env[62503]: DEBUG nova.virt.hardware [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 948.519280] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5479aa-4b22-471c-a46e-b0ce0ebf812f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.522411] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.522634] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.522841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.523062] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.523267] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.527317] env[62503]: INFO nova.compute.manager [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Terminating instance [ 948.529609] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1da733-da0b-49a4-9c9c-fc0bf7e10d8e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.533617] env[62503]: DEBUG nova.compute.manager [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 948.533811] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.534526] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf67406-e1c1-45f9-89da-ebdde7275f5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.548579] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.548794] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94e25cee-971c-4532-a814-602a6ed1cd57 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.587413] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.587648] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.587832] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.588126] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f01d43e6-6a14-41bb-91aa-a7e7a389a9cf {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.594780] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 948.594780] env[62503]: value = "task-1388007" [ 948.594780] env[62503]: _type = "Task" [ 948.594780] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.602309] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.619667] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.619905] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.620163] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleting the datastore file [datastore1] e0df0ce5-1e88-4a39-8911-529b235f5b88 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.620469] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71897ccd-94e4-4299-acc2-2fa0fcb4aeeb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.626887] env[62503]: DEBUG oslo_vmware.api [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 948.626887] env[62503]: value = "task-1388008" [ 948.626887] env[62503]: _type = "Task" [ 948.626887] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.635031] env[62503]: DEBUG oslo_vmware.api [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388008, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.654846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.691399] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388004, 'name': ReconfigVM_Task, 'duration_secs': 0.262724} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.691534] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 68f0c60d-ceff-4d7a-b81d-4845b4c5134c/68f0c60d-ceff-4d7a-b81d-4845b4c5134c.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.692163] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9e46242-8f0d-4698-919a-ea55a99f15f2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.698669] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 948.698669] env[62503]: value = "task-1388009" [ 948.698669] env[62503]: _type = "Task" [ 948.698669] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.706368] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388009, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.798915] env[62503]: DEBUG oslo_concurrency.lockutils [None req-10c12c03-dafe-4bd9-9815-86ee70601878 tempest-InstanceActionsV221TestJSON-959512409 tempest-InstanceActionsV221TestJSON-959512409-project-member] Lock "35bd28b5-101e-429f-8487-fbe5bf3528fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.413s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.811895] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1387997, 'name': CloneVM_Task, 'duration_secs': 2.529034} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.812337] env[62503]: INFO nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Created linked-clone VM from snapshot [ 948.813605] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a25e899-9a7a-4e6d-9339-33fdb161638f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.823969] env[62503]: DEBUG nova.virt.vmwareapi.images [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Uploading image 45d853a9-d2c7-4285-a194-2ae06b428b35 {{(pid=62503) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 948.835058] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Destroying the VM {{(pid=62503) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 948.835361] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8d47e703-1063-4a0e-8da2-1b2753bf024f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.842160] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 948.842160] env[62503]: value = "task-1388010" [ 948.842160] env[62503]: _type = "Task" [ 948.842160] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.851670] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388010, 'name': Destroy_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.989285] env[62503]: DEBUG nova.compute.manager [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Received event network-vif-plugged-228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 948.989656] env[62503]: DEBUG oslo_concurrency.lockutils [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] Acquiring lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.990237] env[62503]: DEBUG oslo_concurrency.lockutils [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.990340] env[62503]: DEBUG oslo_concurrency.lockutils [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.990631] env[62503]: DEBUG nova.compute.manager [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] No waiting events found dispatching network-vif-plugged-228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.990949] env[62503]: WARNING nova.compute.manager [req-37375152-59a1-490e-98ea-2ff7371bcb2e req-89f6d65f-e0ce-4f0b-9a0a-9b89ba9bd2eb service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Received unexpected event network-vif-plugged-228b554a-2e8c-4413-833d-65514ca1285a for instance with vm_state building and task_state spawning. [ 949.072333] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Successfully updated port: 228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.105773] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152045} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.106092] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.106382] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.108831] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.137057] env[62503]: DEBUG oslo_vmware.api [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1542} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.137460] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.138059] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.138177] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.138469] env[62503]: INFO nova.compute.manager [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Took 0.60 seconds to destroy the instance on the hypervisor. [ 949.138906] env[62503]: DEBUG oslo.service.loopingcall [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.139246] env[62503]: DEBUG nova.compute.manager [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 949.139418] env[62503]: DEBUG nova.network.neutron [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.208927] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388009, 'name': Rename_Task, 'duration_secs': 0.154062} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.209185] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.209451] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ec59bca-5a33-4ed5-99c2-2d2d0b127737 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.216545] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 949.216545] env[62503]: value = "task-1388011" [ 949.216545] env[62503]: _type = "Task" [ 949.216545] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.224803] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.354101] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388010, 'name': Destroy_Task, 'duration_secs': 0.354771} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.354616] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Destroyed the VM [ 949.354981] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Deleting Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 949.355320] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c273f781-9a46-4c82-8c1c-0a197b7cee6d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.363302] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 949.363302] env[62503]: value = "task-1388012" [ 949.363302] env[62503]: _type = "Task" [ 949.363302] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.374236] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388012, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.435372] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fac878e-d5dd-4adb-baf0-453f0b396a42 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.443236] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862fae6a-31c5-4136-ab07-6e6a28a21319 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.479466] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7088fac-69f6-4a95-90c1-d62fc6e847d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.487672] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bbc859-3a7b-491d-a8c0-3cbdb8825b7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.501390] env[62503]: DEBUG nova.compute.provider_tree [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.574686] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.574866] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.574983] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.726333] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388011, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.871715] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388012, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.949832] env[62503]: DEBUG nova.network.neutron [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.004846] env[62503]: DEBUG nova.scheduler.client.report [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 950.118629] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.148260] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 950.148524] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 950.148686] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.148872] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 950.149052] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.149256] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 950.149474] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 950.149642] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 950.149815] env[62503]: DEBUG nova.virt.hardware [None 
req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 950.149976] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 950.150170] env[62503]: DEBUG nova.virt.hardware [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 950.151391] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd4036b-b22c-443b-96e0-e6117b6bf7c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.159570] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76856d9c-e9cf-42a2-b521-07542cd36b76 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.174647] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:47:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4be6b321-1129-4f1b-9ca4-aa83bc5b715d', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.182199] env[62503]: DEBUG oslo.service.loopingcall [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.182474] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.182693] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f21a8ce-afb3-43f3-8c5f-5fc06aaf9600 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.202529] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.202529] env[62503]: value = "task-1388013" [ 950.202529] env[62503]: _type = "Task" [ 950.202529] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.212051] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388013, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.227127] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388011, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.272248] env[62503]: DEBUG nova.network.neutron [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updating instance_info_cache with network_info: [{"id": "228b554a-2e8c-4413-833d-65514ca1285a", "address": "fa:16:3e:bc:76:d8", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap228b554a-2e", "ovs_interfaceid": "228b554a-2e8c-4413-833d-65514ca1285a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.374430] env[62503]: DEBUG oslo_vmware.api [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388012, 'name': RemoveSnapshot_Task, 'duration_secs': 0.653163} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.374678] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Deleted Snapshot of the VM instance {{(pid=62503) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 950.454240] env[62503]: INFO nova.compute.manager [-] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Took 1.31 seconds to deallocate network for instance. 
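The records above repeat one pattern many times: a vCenter task is started (CreateVM_Task, PowerOnVM_Task, RemoveSnapshot_Task), the caller logs "Waiting for the task ... to complete", _poll_task reports "progress is N%" on a fixed interval, and the task eventually "completed successfully". The source locations in the log (oslo_vmware/api.py and oslo_service/loopingcall.py:435) point at oslo_service's FixedIntervalLoopingCall as the driver of that polling. The following is a minimal illustrative sketch of that pattern, not Nova's or oslo.vmware's actual code; `poll_task_info` is a hypothetical callable standing in for the PropertyCollector read that fetches task state.

    from oslo_service import loopingcall


    def wait_for_vcenter_task(poll_task_info, poll_interval=0.5):
        """Block until a vCenter task finishes, polling on a fixed interval.

        `poll_task_info` is a hypothetical stand-in; it must return an
        object with `state`, `progress`, and `result` attributes.
        """
        def _poll():
            info = poll_task_info()
            if info.state == 'success':
                # LoopingCallDone stops the loop; its argument becomes the
                # return value of start().wait() below.
                raise loopingcall.LoopingCallDone(info.result)
            if info.state == 'error':
                raise RuntimeError('task failed: %s' % getattr(info, 'error', ''))
            # Mirrors the "Task: {...} progress is N%" DEBUG lines in the log.
            print('progress is %s%%' % info.progress)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()

In the log, the same loop also explains the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" lines: the create/power-on helpers are themselves invoked under a looping call and block until the underlying task raises the done condition.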
[ 950.509884] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.238s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.510470] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 950.513105] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.063s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.513242] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.513380] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 950.513712] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.859s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.513956] env[62503]: DEBUG nova.objects.instance [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lazy-loading 'resources' on Instance uuid 629054bb-8fdb-45a2-8c07-216c4104d4a6 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.515508] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45520abb-3f89-425c-8978-d43252a24364 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.524920] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d695da7-3d3d-4d75-b1b7-144775e78dd8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.539372] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9a6646-ec23-4fa5-9b9d-23ea451842fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.546938] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a7c0a7a5-0f79-491b-acff-97c8778c1874 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.578793] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180690MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 950.578985] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.716878] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388013, 'name': CreateVM_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.726248] env[62503]: DEBUG oslo_vmware.api [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388011, 'name': PowerOnVM_Task, 'duration_secs': 1.030092} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.726900] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.726900] env[62503]: INFO nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Took 8.00 seconds to spawn the instance on the hypervisor. 
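The "Acquiring lock "compute_resources" ... / Lock "compute_resources" acquired ... waited N s / "released" ... held N s" triplets above come from oslo.concurrency's lock decorator (the inner wrapper at lockutils.py:402/407/421), which serializes the resource tracker's claim, usage-update, and periodic audit paths on a single in-process lock. A minimal sketch of that usage, assuming nothing about Nova's real ResourceTracker beyond what the log shows:

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Runs with the "compute_resources" lock held; the DEBUG lines
        # record how long the caller waited for the lock and how long the
        # decorated function held it.
        return {'claimed': instance}


    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # The periodic audit competes for the same lock, which is why the
        # log shows it queueing (non-zero "waited" times) while a claim or
        # usage update is still in flight.
        return None

This contention is visible directly in the timings logged here: callers report multi-second waits on "compute_resources" whenever another claim or audit holds it.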
[ 950.727096] env[62503]: DEBUG nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 950.727761] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5413b411-9d2e-416a-a85c-237a3dcbfdbf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.774879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.775221] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Instance network_info: |[{"id": "228b554a-2e8c-4413-833d-65514ca1285a", "address": "fa:16:3e:bc:76:d8", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap228b554a-2e", "ovs_interfaceid": "228b554a-2e8c-4413-833d-65514ca1285a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 950.775681] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:76:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '228b554a-2e8c-4413-833d-65514ca1285a', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.789560] env[62503]: DEBUG oslo.service.loopingcall [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.790219] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.790585] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39dd605e-815b-4108-878d-1208733027df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.813277] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.813277] env[62503]: value = "task-1388014" [ 950.813277] env[62503]: _type = "Task" [ 950.813277] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.821417] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388014, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.882293] env[62503]: WARNING nova.compute.manager [None req-20ed2247-b43b-423a-b30c-af70223d8eff tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Image not found during snapshot: nova.exception.ImageNotFound: Image 45d853a9-d2c7-4285-a194-2ae06b428b35 could not be found. [ 950.959083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.016341] env[62503]: DEBUG nova.compute.manager [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Received event network-changed-228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 951.016564] env[62503]: DEBUG nova.compute.manager [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Refreshing instance network info cache due to event network-changed-228b554a-2e8c-4413-833d-65514ca1285a. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 951.016799] env[62503]: DEBUG oslo_concurrency.lockutils [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] Acquiring lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.017022] env[62503]: DEBUG oslo_concurrency.lockutils [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] Acquired lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.017121] env[62503]: DEBUG nova.network.neutron [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Refreshing network info cache for port 228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.019628] env[62503]: DEBUG nova.compute.utils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 951.024159] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 951.024332] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.106453] env[62503]: DEBUG nova.policy [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 951.147454] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f55a22-1579-408b-8eab-2a3e093867d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.154838] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01300b5f-0911-473d-989a-d8068f617117 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.189260] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7760f5b9-5915-473b-95c8-8751e6833e3f {{(pid=62503) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.197335] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e27c4f5-f66a-4f4e-b57b-89c2e48e67e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.218385] env[62503]: DEBUG nova.compute.provider_tree [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.223757] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388013, 'name': CreateVM_Task, 'duration_secs': 0.623421} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.223757] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.224344] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.224538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.224893] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.226064] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91466264-8eb2-4cde-9a45-73d81faa3efb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.230815] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 951.230815] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529c493e-8267-fd2b-8aed-b6e0cab73d5e" [ 951.230815] env[62503]: _type = "Task" [ 951.230815] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.244167] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529c493e-8267-fd2b-8aed-b6e0cab73d5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.248529] env[62503]: INFO nova.compute.manager [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Took 23.01 seconds to build instance. [ 951.322718] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388014, 'name': CreateVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.393035] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Successfully created port: 077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.521587] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 951.725396] env[62503]: DEBUG nova.scheduler.client.report [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 951.746182] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]529c493e-8267-fd2b-8aed-b6e0cab73d5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011017} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.748609] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.748895] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.749180] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.749337] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.749516] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.750183] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74f2d880-0894-4c8b-96f9-91645aaf1004 tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.515s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.750582] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9c4aa89-d522-487d-956a-4d9452882d44 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.759357] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.759357] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.759767] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47816b5d-3644-4218-806a-452cdeb6af5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.765032] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 951.765032] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b96172-9c42-0f3e-5cda-d6eeee799c10" [ 951.765032] env[62503]: _type = "Task" [ 951.765032] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.774475] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b96172-9c42-0f3e-5cda-d6eeee799c10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.809355] env[62503]: DEBUG nova.network.neutron [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updated VIF entry in instance network info cache for port 228b554a-2e8c-4413-833d-65514ca1285a. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.809712] env[62503]: DEBUG nova.network.neutron [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updating instance_info_cache with network_info: [{"id": "228b554a-2e8c-4413-833d-65514ca1285a", "address": "fa:16:3e:bc:76:d8", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap228b554a-2e", "ovs_interfaceid": "228b554a-2e8c-4413-833d-65514ca1285a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.827677] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388014, 'name': CreateVM_Task, 'duration_secs': 0.666277} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.827677] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.827677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.827677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.827677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.827677] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-290c54b3-ffb8-49bb-a3a0-0ab1fc5f0432 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.832345] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 951.832345] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527afcc5-0d58-b00b-5d71-f948d12ae2e4" [ 951.832345] env[62503]: _type = "Task" [ 951.832345] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.839581] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527afcc5-0d58-b00b-5d71-f948d12ae2e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.088397] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.088686] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.088903] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.089137] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.089334] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.091540] env[62503]: INFO nova.compute.manager [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Terminating instance [ 952.093368] env[62503]: DEBUG nova.compute.manager [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 952.093565] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.094397] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72adf7b1-e348-4cd3-8352-574d947d642a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.102227] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.102472] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a34e5541-e1bc-49d6-89d1-32fb64cfef85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.108160] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 952.108160] env[62503]: value = "task-1388015" [ 952.108160] env[62503]: _type = "Task" [ 952.108160] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.117018] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.230567] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.233244] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.654s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.258326] env[62503]: INFO nova.scheduler.client.report [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Deleted allocations for instance 629054bb-8fdb-45a2-8c07-216c4104d4a6 [ 952.275138] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b96172-9c42-0f3e-5cda-d6eeee799c10, 'name': SearchDatastore_Task, 'duration_secs': 0.008536} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.276772] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d6ec580-7c4d-4fe7-81cf-a9e927c92558 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.282389] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 952.282389] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525b95df-e9bd-2c39-5e90-854b48763934" [ 952.282389] env[62503]: _type = "Task" [ 952.282389] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.290030] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525b95df-e9bd-2c39-5e90-854b48763934, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.312016] env[62503]: DEBUG oslo_concurrency.lockutils [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] Releasing lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.312346] env[62503]: DEBUG nova.compute.manager [req-efd93e28-e02a-4155-8091-5cf8e8793840 req-e17b4d3e-4adc-45e3-9552-af4c2709f588 service nova] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Received event network-vif-deleted-41418625-ee00-4672-80fc-df777f1e7301 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 952.340969] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527afcc5-0d58-b00b-5d71-f948d12ae2e4, 'name': SearchDatastore_Task, 'duration_secs': 0.008905} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.341277] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.341501] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.341735] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.341885] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.342074] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.342346] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7a96cd7-bb54-4e00-a00c-529607798458 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.349382] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.349569] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.350236] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cabee5b-f6f5-42e0-8fd9-9a59e871bb2c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.355152] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 952.355152] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c90296-8b04-0f70-7a71-095ffe19e729" [ 952.355152] env[62503]: _type = "Task" [ 952.355152] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.361857] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c90296-8b04-0f70-7a71-095ffe19e729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.501393] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.501668] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.501880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.502092] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.502271] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.505479] env[62503]: INFO nova.compute.manager [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Terminating instance [ 952.507307] env[62503]: DEBUG nova.compute.manager [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 952.507502] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.508316] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bb4913-07c6-4009-8a95-f122d6622e53 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.515876] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.516134] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aeeabe74-47bf-4a22-955a-8d73e972fc39 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.522318] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 952.522318] env[62503]: value = "task-1388016" [ 952.522318] env[62503]: _type = "Task" [ 952.522318] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.530572] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 952.532388] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388016, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.557911] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.558296] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.558495] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.558643] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.558789] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.559030] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.559367] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.559560] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.559747] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.559922] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.560184] env[62503]: DEBUG nova.virt.hardware [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.561867] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25496a7e-efc5-49e0-9ed5-a058efc1906b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.569935] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bca1e0-e8e2-4a87-8bf7-6757593b957c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.617981] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388015, 'name': PowerOffVM_Task, 'duration_secs': 0.157033} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.618271] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.618461] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.618730] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e4a93bd-495f-40b8-a8b8-8fc995777a09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.683957] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.684284] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.684543] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 
tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleting the datastore file [datastore1] 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.684894] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2f36fe9-9cf1-4b7d-bb3f-04d11b736b24 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.692775] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for the task: (returnval){ [ 952.692775] env[62503]: value = "task-1388018" [ 952.692775] env[62503]: _type = "Task" [ 952.692775] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.702719] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.766527] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52c126ff-63e1-418c-a270-6d77b9f52bbd tempest-ListServerFiltersTestJSON-1917602589 tempest-ListServerFiltersTestJSON-1917602589-project-member] Lock "629054bb-8fdb-45a2-8c07-216c4104d4a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.270s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.793760] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525b95df-e9bd-2c39-5e90-854b48763934, 'name': SearchDatastore_Task, 'duration_secs': 0.008265} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.794421] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.794421] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.794977] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39fd995d-6979-4be4-b4cf-0183857de21b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.801211] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 952.801211] env[62503]: value = "task-1388019" [ 952.801211] env[62503]: _type = "Task" [ 952.801211] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.810935] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.866681] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c90296-8b04-0f70-7a71-095ffe19e729, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.868830] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-800e5544-e8c5-4bb2-8d56-9e28100e7a5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.872837] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 952.872837] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c3d968-dde8-1a85-c989-9d46a0eee1b9" [ 952.872837] env[62503]: _type = "Task" [ 952.872837] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.880722] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c3d968-dde8-1a85-c989-9d46a0eee1b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.909124] env[62503]: DEBUG nova.compute.manager [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Received event network-vif-plugged-077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 952.909384] env[62503]: DEBUG oslo_concurrency.lockutils [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] Acquiring lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.909630] env[62503]: DEBUG oslo_concurrency.lockutils [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.909807] env[62503]: DEBUG oslo_concurrency.lockutils [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.909993] env[62503]: DEBUG nova.compute.manager [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] No waiting events found dispatching network-vif-plugged-077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.910761] env[62503]: WARNING nova.compute.manager [req-b81d416a-6c5c-4862-becf-2275ba81c1c5 req-7d932ecd-2b93-4783-96f9-11c8d7ef43d8 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Received unexpected event network-vif-plugged-077e8de3-2f79-490d-8629-d1d9ffd38862 for instance with vm_state building and task_state spawning. [ 953.037621] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388016, 'name': PowerOffVM_Task, 'duration_secs': 0.210789} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.037621] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.037621] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.037959] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac717af8-1592-4c9a-9d5e-87cc9ff74552 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.103795] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.103795] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.103795] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleting the datastore file [datastore2] 68f0c60d-ceff-4d7a-b81d-4845b4c5134c {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.103795] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38fbeb67-1b14-4402-92dd-0e1c1a1d854b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.112443] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for the task: (returnval){ [ 953.112443] env[62503]: value = "task-1388021" [ 953.112443] env[62503]: _type = "Task" [ 953.112443] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.119041] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.206237] env[62503]: DEBUG oslo_vmware.api [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Task: {'id': task-1388018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140482} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.206919] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.207322] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.207653] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.207986] env[62503]: INFO nova.compute.manager [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 953.208491] env[62503]: DEBUG oslo.service.loopingcall [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.208963] env[62503]: DEBUG nova.compute.manager [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 953.209199] env[62503]: DEBUG nova.network.neutron [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.268401] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.268563] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance e693bcc2-3883-466d-913c-831146ca81e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.268690] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.268810] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 5ba614a3-17be-4069-8219-f88f4d27aab9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.269033] env[62503]: WARNING nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance e0df0ce5-1e88-4a39-8911-529b235f5b88 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 953.269208] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 68f0c60d-ceff-4d7a-b81d-4845b4c5134c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.269329] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.269445] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 7eb8e049-dd65-43bd-829a-8f773f7ad156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.269643] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 953.269804] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 953.311900] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498027} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.312054] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.312292] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.312566] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f96c3a7-2c69-4688-8373-6e996318879c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.324679] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 953.324679] env[62503]: value = "task-1388022" [ 953.324679] env[62503]: _type = "Task" [ 953.324679] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.335690] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388022, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.390172] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52c3d968-dde8-1a85-c989-9d46a0eee1b9, 'name': SearchDatastore_Task, 'duration_secs': 0.00825} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.390460] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.390726] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7/ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.391015] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e548192e-8380-4e68-a4f0-6fbfb2ebfd09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.396885] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf8124b-4594-466e-9bd9-e704ba82850d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.400350] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 953.400350] env[62503]: value = "task-1388023" [ 953.400350] env[62503]: _type = "Task" [ 953.400350] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.408679] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b27d422-a731-4208-94c9-a09bf0f00584 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.417308] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.445423] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe87047d-583f-4af0-b65b-a5df1e38d17f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.455086] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee966508-034f-43c8-b567-c5d0845ba8e7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.472039] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.474391] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Successfully updated port: 077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.486560] env[62503]: DEBUG nova.compute.manager [req-c79a922a-10ca-42c1-b533-cc541d9b357e req-467e36b7-cfc0-4aab-aa5e-1f907cf5d786 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Received event network-vif-deleted-814e714d-f9f3-47d3-a228-0f1fd7000eb1 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 953.486826] env[62503]: INFO nova.compute.manager [req-c79a922a-10ca-42c1-b533-cc541d9b357e req-467e36b7-cfc0-4aab-aa5e-1f907cf5d786 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Neutron deleted interface 814e714d-f9f3-47d3-a228-0f1fd7000eb1; detaching it from the instance and deleting it from the info cache [ 953.486907] env[62503]: DEBUG nova.network.neutron [req-c79a922a-10ca-42c1-b533-cc541d9b357e req-467e36b7-cfc0-4aab-aa5e-1f907cf5d786 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.494938] env[62503]: DEBUG nova.compute.manager [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Received event network-changed-077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 953.494938] env[62503]: DEBUG nova.compute.manager [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Refreshing instance network info cache due to event network-changed-077e8de3-2f79-490d-8629-d1d9ffd38862. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 953.494938] env[62503]: DEBUG oslo_concurrency.lockutils [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] Acquiring lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.494938] env[62503]: DEBUG oslo_concurrency.lockutils [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] Acquired lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.494938] env[62503]: DEBUG nova.network.neutron [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Refreshing network info cache for port 077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.621752] env[62503]: DEBUG oslo_vmware.api [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Task: {'id': task-1388021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270367} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.622190] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.622238] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.622408] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.622633] env[62503]: INFO nova.compute.manager [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 953.622911] env[62503]: DEBUG oslo.service.loopingcall [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.623057] env[62503]: DEBUG nova.compute.manager [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 953.623155] env[62503]: DEBUG nova.network.neutron [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.837370] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06804} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.837756] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.839097] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053871a9-2cd0-40db-b6b9-b5508dfdf83d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.877747] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.878280] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec820e53-b2e7-4a76-bc89-cb65f4a47d6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.917919] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388023, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.921321] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 953.921321] env[62503]: value = "task-1388024" [ 953.921321] env[62503]: _type = "Task" [ 953.921321] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.934676] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388024, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.949620] env[62503]: DEBUG nova.network.neutron [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.977207] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 953.980877] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.991152] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df611f76-235e-4b82-9445-20dab224b4d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.004584] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cda9403-7ae0-4352-ab4d-cc8d7eb183cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.042625] env[62503]: DEBUG nova.compute.manager [req-c79a922a-10ca-42c1-b533-cc541d9b357e req-467e36b7-cfc0-4aab-aa5e-1f907cf5d786 service nova] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Detach interface failed, port_id=814e714d-f9f3-47d3-a228-0f1fd7000eb1, reason: Instance 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 954.056828] env[62503]: DEBUG nova.network.neutron [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.143396] env[62503]: DEBUG nova.network.neutron [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.410213] env[62503]: DEBUG nova.network.neutron [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.415181] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594423} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.415662] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7/ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.415882] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.416152] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dbf3f7cb-3bb1-4588-9ee8-95760cef292f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.423069] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 954.423069] env[62503]: value = "task-1388025" [ 954.423069] env[62503]: _type = "Task" [ 954.423069] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.436945] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.437215] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388024, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.452262] env[62503]: INFO nova.compute.manager [-] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Took 1.24 seconds to deallocate network for instance. [ 954.483019] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 954.483302] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.250s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.483622] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.525s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.483842] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.507113] env[62503]: INFO nova.scheduler.client.report [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance e0df0ce5-1e88-4a39-8911-529b235f5b88 [ 954.646496] env[62503]: DEBUG oslo_concurrency.lockutils [req-49bdcc3d-1b64-4112-9b0a-849d7a193b49 req-60342da2-e7e1-4e2b-be32-edbc6433df54 service nova] Releasing lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.646994] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.647233] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.916672] env[62503]: INFO nova.compute.manager [-] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Took 1.29 seconds to deallocate network for instance. [ 954.937300] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388024, 'name': ReconfigVM_Task, 'duration_secs': 0.987528} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.940120] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9/5ba614a3-17be-4069-8219-f88f4d27aab9.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.940837] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.283238} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.941157] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab0025da-8c63-4aba-ab53-f5eeda7f622d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.942876] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.943735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6323ea76-87ad-4dc1-8c19-88e5af59e6a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.959059] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.959059] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.959059] env[62503]: DEBUG nova.objects.instance [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lazy-loading 'resources' on Instance uuid 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.968483] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7/ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.970826] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fe057b0-93b7-4f20-93b0-00d50fcdda83 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.986184] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 954.986184] env[62503]: value = "task-1388026" [ 954.986184] env[62503]: _type = "Task" [ 954.986184] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.991503] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 954.991503] env[62503]: value = "task-1388027" [ 954.991503] env[62503]: _type = "Task" [ 954.991503] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.998481] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388026, 'name': Rename_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.003916] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388027, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.015475] env[62503]: DEBUG oslo_concurrency.lockutils [None req-edd647b4-5eaa-43a5-af02-1232c3b4376c tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "e0df0ce5-1e88-4a39-8911-529b235f5b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.493s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.200282] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.332341] env[62503]: DEBUG nova.network.neutron [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Updating instance_info_cache with network_info: [{"id": "077e8de3-2f79-490d-8629-d1d9ffd38862", "address": "fa:16:3e:fb:53:04", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap077e8de3-2f", "ovs_interfaceid": "077e8de3-2f79-490d-8629-d1d9ffd38862", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.430088] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.502953] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388026, 'name': Rename_Task, 'duration_secs': 0.168571} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.503240] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388027, 'name': ReconfigVM_Task, 'duration_secs': 0.365794} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.505747] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.506055] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Reconfigured VM instance instance-00000058 to attach disk [datastore2] ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7/ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.506921] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23f768ea-c049-4e87-9553-78bf384b58c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.508249] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-831bbd93-a9e5-4d29-8f16-27d37efad208 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.514327] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 955.514327] env[62503]: value = "task-1388029" [ 955.514327] env[62503]: _type = "Task" [ 955.514327] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.515590] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 955.515590] env[62503]: value = "task-1388028" [ 955.515590] env[62503]: _type = "Task" [ 955.515590] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.525521] env[62503]: DEBUG nova.compute.manager [req-249ecee2-f615-4b6c-9b09-7ab2c2a2642c req-f083fc64-63a5-4cdb-9390-b15f5c002fce service nova] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Received event network-vif-deleted-89edb450-124a-47e0-b611-2b5ffb36fc2b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 955.531841] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388029, 'name': Rename_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.535060] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388028, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.583102] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "12dff44c-ebb9-4fa3-8396-defcdb474152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.583102] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.603930] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de396d2b-04f2-41cb-88d3-fd0a67baa210 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.611640] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d965880-5ea6-41d6-8084-c2fb0422b441 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.645853] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7011b7cc-d772-4845-8f13-ef085f470ae1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.654297] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033dba8f-c950-4174-a918-198fa2fb1d00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.667751] env[62503]: DEBUG nova.compute.provider_tree [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.834910] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-7eb8e049-dd65-43bd-829a-8f773f7ad156" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.835289] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance network_info: |[{"id": "077e8de3-2f79-490d-8629-d1d9ffd38862", "address": "fa:16:3e:fb:53:04", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap077e8de3-2f", "ovs_interfaceid": "077e8de3-2f79-490d-8629-d1d9ffd38862", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 955.835771] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:53:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '077e8de3-2f79-490d-8629-d1d9ffd38862', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.843152] env[62503]: DEBUG oslo.service.loopingcall [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.843365] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.843588] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18ddee4f-1989-466c-908b-6099122a7a8d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.862957] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.862957] env[62503]: value = "task-1388030" [ 955.862957] env[62503]: _type = "Task" [ 955.862957] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.870492] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388030, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.028293] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388029, 'name': Rename_Task, 'duration_secs': 0.145373} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.031210] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.031489] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388028, 'name': PowerOnVM_Task, 'duration_secs': 0.440216} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.031693] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-630b6891-0bcb-4f48-a03c-891c3ec5a6d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.033209] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.033434] env[62503]: DEBUG nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 956.034210] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a86a87b-44e9-49ea-a62b-0f3dc66c5874 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.043915] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 956.043915] env[62503]: value = "task-1388031" [ 956.043915] env[62503]: _type = "Task" [ 956.043915] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.051330] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388031, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.085221] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 956.170757] env[62503]: DEBUG nova.scheduler.client.report [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 956.373054] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388030, 'name': CreateVM_Task, 'duration_secs': 0.347136} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.373202] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.373897] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.374099] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.374430] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.374716] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-084921cc-2aee-4794-b734-fab3bf9292a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.379130] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 956.379130] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ce46d3-679e-1475-6b1d-1ef86a1b0e02" [ 956.379130] env[62503]: _type = "Task" [ 956.379130] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.386286] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ce46d3-679e-1475-6b1d-1ef86a1b0e02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.546412] env[62503]: INFO nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] bringing vm to original state: 'stopped' [ 956.557925] env[62503]: DEBUG oslo_vmware.api [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388031, 'name': PowerOnVM_Task, 'duration_secs': 0.46609} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.557925] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.557925] env[62503]: INFO nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Took 8.07 seconds to spawn the instance on the hypervisor. 
[ 956.558140] env[62503]: DEBUG nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 956.558816] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b97f9d-d993-4017-94b2-3143b6a8329d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.608145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.676353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.718s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.678555] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.249s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.678790] env[62503]: DEBUG nova.objects.instance [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lazy-loading 'resources' on Instance uuid 68f0c60d-ceff-4d7a-b81d-4845b4c5134c {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.694692] env[62503]: INFO nova.scheduler.client.report [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Deleted allocations for instance 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9 [ 956.889339] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ce46d3-679e-1475-6b1d-1ef86a1b0e02, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.889588] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.889824] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.890070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.890228] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.890441] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.890708] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65fcf29b-feda-4d1c-b59c-125874c4cfd8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.899752] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.899923] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.900595] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18482e4e-10b3-483c-8aac-ae1583376355 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.905178] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 956.905178] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b1096c-a119-b131-2892-b7dacd92694b" [ 956.905178] env[62503]: _type = "Task" [ 956.905178] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.911922] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b1096c-a119-b131-2892-b7dacd92694b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.075835] env[62503]: INFO nova.compute.manager [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Took 18.16 seconds to build instance. [ 957.201606] env[62503]: DEBUG oslo_concurrency.lockutils [None req-2648dab7-5c73-481a-a2b5-ed36834fdfa2 tempest-ImagesTestJSON-14409066 tempest-ImagesTestJSON-14409066-project-member] Lock "09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.113s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.268418] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2150e7-04d9-45ea-95ad-8d82571c77cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.275967] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d8d423-1dc8-4718-9ae0-54417bfe6721 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.306568] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73af90e-0025-4260-a7ac-94377b623433 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.314662] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29933b85-60c6-4c2e-91ef-46d8d0bb3573 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.328141] env[62503]: DEBUG nova.compute.provider_tree [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.416627] env[62503]: DEBUG 
oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b1096c-a119-b131-2892-b7dacd92694b, 'name': SearchDatastore_Task, 'duration_secs': 0.018987} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.417439] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571136b5-d452-461a-a6ce-b9c0cace694a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.422625] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 957.422625] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b373e3-b58e-56d3-2c91-ac65bc529806" [ 957.422625] env[62503]: _type = "Task" [ 957.422625] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.430037] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b373e3-b58e-56d3-2c91-ac65bc529806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.556515] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.556746] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.557057] env[62503]: DEBUG nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 957.558393] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd5b8e5-59bd-49a9-ae2e-d9a29b35959f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.565412] env[62503]: DEBUG nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62503) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3375}} [ 957.578990] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1f603411-3ca3-43e0-a6ad-82a61ccb75d8 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.667s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.834301] env[62503]: DEBUG nova.scheduler.client.report [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 957.936499] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b373e3-b58e-56d3-2c91-ac65bc529806, 'name': SearchDatastore_Task, 'duration_secs': 0.020603} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.936819] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.937469] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7eb8e049-dd65-43bd-829a-8f773f7ad156/7eb8e049-dd65-43bd-829a-8f773f7ad156.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.938043] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58533c4f-e747-4db5-8fd9-4a3f8e2095ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.945973] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 957.945973] env[62503]: value = "task-1388032" [ 957.945973] env[62503]: _type = "Task" [ 957.945973] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.956839] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.970860] env[62503]: DEBUG nova.compute.manager [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Received event network-changed-228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 957.970860] env[62503]: DEBUG nova.compute.manager [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Refreshing instance network info cache due to event network-changed-228b554a-2e8c-4413-833d-65514ca1285a. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 957.971175] env[62503]: DEBUG oslo_concurrency.lockutils [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] Acquiring lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.971314] env[62503]: DEBUG oslo_concurrency.lockutils [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] Acquired lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.971523] env[62503]: DEBUG nova.network.neutron [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Refreshing network info cache for port 228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.069825] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.070200] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17ea4909-8e01-424f-8e19-7af9147d1caf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.076734] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 958.076734] env[62503]: value = "task-1388033" [ 958.076734] env[62503]: _type = "Task" [ 958.076734] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.084420] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388033, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.338095] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.340999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.733s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.350153] env[62503]: INFO nova.compute.claims [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.374523] env[62503]: INFO nova.scheduler.client.report [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Deleted allocations for instance 68f0c60d-ceff-4d7a-b81d-4845b4c5134c [ 958.459126] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469523} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.459126] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7eb8e049-dd65-43bd-829a-8f773f7ad156/7eb8e049-dd65-43bd-829a-8f773f7ad156.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.459126] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.459126] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf3fef39-40cd-4d05-93c0-d6370aec55c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.465458] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 958.465458] env[62503]: value = "task-1388034" [ 958.465458] env[62503]: _type = "Task" [ 958.465458] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.477900] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388034, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.587802] env[62503]: DEBUG oslo_vmware.api [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388033, 'name': PowerOffVM_Task, 'duration_secs': 0.429925} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.592020] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.592020] env[62503]: DEBUG nova.compute.manager [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 958.592020] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2187f184-5fef-4eef-9e89-620ef62ddbf6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.884267] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7c9cfa9b-a701-4482-8aa9-8bbced75056a tempest-ServerDiskConfigTestJSON-2078736902 tempest-ServerDiskConfigTestJSON-2078736902-project-member] Lock "68f0c60d-ceff-4d7a-b81d-4845b4c5134c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.382s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.896528] env[62503]: DEBUG nova.network.neutron [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updated VIF entry in instance network info cache for port 228b554a-2e8c-4413-833d-65514ca1285a. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.896806] env[62503]: DEBUG nova.network.neutron [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updating instance_info_cache with network_info: [{"id": "228b554a-2e8c-4413-833d-65514ca1285a", "address": "fa:16:3e:bc:76:d8", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap228b554a-2e", "ovs_interfaceid": "228b554a-2e8c-4413-833d-65514ca1285a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.983763] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388034, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065211} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.984075] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.984892] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a85a791-a8a2-49b4-aa6f-85fbb0fa1fd6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.008280] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 7eb8e049-dd65-43bd-829a-8f773f7ad156/7eb8e049-dd65-43bd-829a-8f773f7ad156.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.009016] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29238a7b-f7da-4d43-95cb-5c97425e5048 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.029438] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 959.029438] env[62503]: value = "task-1388035" [ 959.029438] env[62503]: _type = "Task" [ 959.029438] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.039448] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388035, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.113976] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.400917] env[62503]: DEBUG oslo_concurrency.lockutils [req-96084116-f7ef-4c29-b709-53c5e09d32f7 req-7a2c6430-9684-4656-9a4b-ff44f6e57ee1 service nova] Releasing lock "refresh_cache-ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.470414] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9f6aa8-c7ef-430a-8a3d-5fb67c2a7d9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.479229] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e924fc-950e-439d-b701-a96b3bf221c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.514428] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe485132-138e-4670-b61b-183a97d4f2d6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.524593] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51661a28-41b3-4e4a-ad7e-76637fee71c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.541564] env[62503]: DEBUG nova.compute.provider_tree [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.548502] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388035, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.624193] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.810017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.810017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.810017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.810017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.810017] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.812440] env[62503]: INFO nova.compute.manager [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Terminating instance [ 959.816023] env[62503]: DEBUG nova.compute.manager [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 959.816023] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.819687] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2c5d81-ede7-475e-acfe-ccaec8dc6a42 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.826131] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.826131] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5f5f5ba-f574-4fe5-9702-4836b0204427 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.890325] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.891239] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.891239] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore1] 5ba614a3-17be-4069-8219-f88f4d27aab9 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.891429] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2ba5310-1d40-45da-986c-5da78972e065 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.898649] env[62503]: DEBUG oslo_vmware.api [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 959.898649] env[62503]: value = "task-1388037" [ 959.898649] env[62503]: _type = "Task" [ 959.898649] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.906354] env[62503]: DEBUG oslo_vmware.api [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388037, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.039498] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388035, 'name': ReconfigVM_Task, 'duration_secs': 0.627212} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.039773] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 7eb8e049-dd65-43bd-829a-8f773f7ad156/7eb8e049-dd65-43bd-829a-8f773f7ad156.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.040433] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0000020f-69a7-4f4b-b51a-aa8198ff597b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.044981] env[62503]: DEBUG nova.scheduler.client.report [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 960.049185] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 960.049185] env[62503]: value = "task-1388038" [ 960.049185] env[62503]: _type = "Task" [ 960.049185] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.057176] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388038, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.413296] env[62503]: DEBUG oslo_vmware.api [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132445} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.413656] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.413909] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.414166] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.414415] env[62503]: INFO nova.compute.manager [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Took 0.60 seconds to destroy the instance on the hypervisor. [ 960.414765] env[62503]: DEBUG oslo.service.loopingcall [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.415039] env[62503]: DEBUG nova.compute.manager [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 960.415169] env[62503]: DEBUG nova.network.neutron [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.552651] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.552651] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 960.553817] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.930s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.553911] env[62503]: DEBUG nova.objects.instance [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 960.566073] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388038, 'name': Rename_Task, 'duration_secs': 0.42676} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.566073] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.566073] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1159628-ce69-4b3c-a7b5-ffcb1809ae71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.571225] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 960.571225] env[62503]: value = "task-1388039" [ 960.571225] env[62503]: _type = "Task" [ 960.571225] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.578893] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388039, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.674373] env[62503]: DEBUG nova.compute.manager [req-47a3615f-ffd6-4891-a371-2cd0ccc5e73f req-f0aec86b-9450-417b-a01f-951bf32ca00e service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Received event network-vif-deleted-4be6b321-1129-4f1b-9ca4-aa83bc5b715d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 960.674373] env[62503]: INFO nova.compute.manager [req-47a3615f-ffd6-4891-a371-2cd0ccc5e73f req-f0aec86b-9450-417b-a01f-951bf32ca00e service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Neutron deleted interface 4be6b321-1129-4f1b-9ca4-aa83bc5b715d; detaching it from the instance and deleting it from the info cache [ 960.674373] env[62503]: DEBUG nova.network.neutron [req-47a3615f-ffd6-4891-a371-2cd0ccc5e73f req-f0aec86b-9450-417b-a01f-951bf32ca00e service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.061845] env[62503]: DEBUG nova.compute.utils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.067196] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 961.067400] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.084288] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388039, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.143139] env[62503]: DEBUG nova.policy [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 961.143981] env[62503]: DEBUG nova.network.neutron [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.177562] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5817923a-3621-40a9-af60-ccb93455f033 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.187760] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6142a5-6faf-42fd-905e-af7d03b044c3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.216981] env[62503]: DEBUG nova.compute.manager [req-47a3615f-ffd6-4891-a371-2cd0ccc5e73f req-f0aec86b-9450-417b-a01f-951bf32ca00e service nova] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Detach interface failed, port_id=4be6b321-1129-4f1b-9ca4-aa83bc5b715d, reason: Instance 5ba614a3-17be-4069-8219-f88f4d27aab9 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 961.558246] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Successfully created port: 3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.568458] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 961.573345] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4d433f7d-faf4-4699-ab6f-6c22fad1a618 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.589209] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388039, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.648022] env[62503]: INFO nova.compute.manager [-] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Took 1.23 seconds to deallocate network for instance. [ 962.087949] env[62503]: DEBUG oslo_vmware.api [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388039, 'name': PowerOnVM_Task, 'duration_secs': 1.049893} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.088261] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.088418] env[62503]: INFO nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Took 9.56 seconds to spawn the instance on the hypervisor. [ 962.088602] env[62503]: DEBUG nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 962.089561] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baca8721-2021-4fa4-a72f-539dfda0fb4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.155536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.155890] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.156049] env[62503]: DEBUG nova.objects.instance [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'resources' on Instance uuid 5ba614a3-17be-4069-8219-f88f4d27aab9 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.585234] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 962.609987] env[62503]: INFO nova.compute.manager [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Took 21.50 seconds to build instance. [ 962.624365] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.624657] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.624885] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.624962] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.626598] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.626598] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.627038] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.627249] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 
tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.627434] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.627610] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.627871] env[62503]: DEBUG nova.virt.hardware [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.628794] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df330a72-e7e6-41b4-bdbb-0d7efefb2a66 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.641982] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e51ceaa-95e3-48fc-9497-a1b7ce675054 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.765085] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f851240-77c7-4df7-a836-991066c34a95 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.774030] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2da503-6781-4062-8a91-2db4542f634b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.813117] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e8abc7-2d08-4456-b3b8-199eb04634f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.821970] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4885a25c-504c-4929-a03e-c7a13bfe5044 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.835680] env[62503]: DEBUG nova.compute.provider_tree [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.111406] env[62503]: DEBUG oslo_concurrency.lockutils [None req-93e53ce4-ad3e-439a-926c-0196bf562468 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.011s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.264341] env[62503]: DEBUG nova.compute.manager [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Received event network-vif-plugged-3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 963.264341] env[62503]: DEBUG oslo_concurrency.lockutils [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] Acquiring lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.264341] env[62503]: DEBUG oslo_concurrency.lockutils [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.264341] env[62503]: DEBUG oslo_concurrency.lockutils [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.264341] env[62503]: DEBUG nova.compute.manager [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] No waiting events found dispatching network-vif-plugged-3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 963.265108] env[62503]: WARNING nova.compute.manager [req-ce9f4699-715d-4ebf-870c-483145ecfbf3 req-6aa9df12-15f4-4051-9201-6970bafaeb7f service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Received unexpected event network-vif-plugged-3907d209-7ee9-44ae-981d-06c45d389c82 for instance with vm_state building and task_state spawning. 
[ 963.342216] env[62503]: DEBUG nova.scheduler.client.report [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 963.381939] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Successfully updated port: 3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 963.846358] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.865651] env[62503]: INFO nova.scheduler.client.report [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocations for instance 5ba614a3-17be-4069-8219-f88f4d27aab9 [ 963.883617] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.883786] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.883950] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.228083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.228083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.377653] env[62503]: DEBUG oslo_concurrency.lockutils [None req-79a4509c-27ab-4a97-bead-58bf15bc8d47 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "5ba614a3-17be-4069-8219-f88f4d27aab9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.570s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.379040] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.379278] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.422809] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 964.592386] env[62503]: DEBUG nova.network.neutron [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Updating instance_info_cache with network_info: [{"id": "3907d209-7ee9-44ae-981d-06c45d389c82", "address": "fa:16:3e:c3:52:aa", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3907d209-7e", "ovs_interfaceid": "3907d209-7ee9-44ae-981d-06c45d389c82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.730509] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 964.883151] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 964.971815] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.971815] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.010808] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "b120b29f-0e26-465f-bc6f-4214525ae2de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.011268] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.100232] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.100232] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Instance network_info: |[{"id": "3907d209-7ee9-44ae-981d-06c45d389c82", "address": "fa:16:3e:c3:52:aa", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap3907d209-7e", "ovs_interfaceid": "3907d209-7ee9-44ae-981d-06c45d389c82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 965.100232] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:52:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3907d209-7ee9-44ae-981d-06c45d389c82', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.120171] env[62503]: DEBUG oslo.service.loopingcall [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.121226] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.121226] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50d88a10-f7bd-4eb6-9fbd-983159db6e26 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.145853] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.145853] env[62503]: value = "task-1388040" [ 965.145853] env[62503]: _type = "Task" [ 965.145853] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.154853] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388040, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.261976] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.262373] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.264070] env[62503]: INFO nova.compute.claims [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.289057] env[62503]: DEBUG nova.compute.manager [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Received event network-changed-3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 965.290798] env[62503]: DEBUG nova.compute.manager [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Refreshing instance network info cache due to event network-changed-3907d209-7ee9-44ae-981d-06c45d389c82. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 965.290798] env[62503]: DEBUG oslo_concurrency.lockutils [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] Acquiring lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.290798] env[62503]: DEBUG oslo_concurrency.lockutils [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] Acquired lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.290798] env[62503]: DEBUG nova.network.neutron [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Refreshing network info cache for port 3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 965.414730] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.474527] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 965.515371] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 965.656032] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388040, 'name': CreateVM_Task, 'duration_secs': 0.388532} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.656219] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.657055] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.657168] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.657391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.657646] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bb41384-9231-44fa-8bbb-3362b4cf0b11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.662416] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 965.662416] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52875ec8-9f12-7f7b-da74-e3187e9053ed" [ 965.662416] env[62503]: _type = "Task" [ 965.662416] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.670187] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52875ec8-9f12-7f7b-da74-e3187e9053ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.995982] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.006331] env[62503]: DEBUG nova.network.neutron [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Updated VIF entry in instance network info cache for port 3907d209-7ee9-44ae-981d-06c45d389c82. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.006735] env[62503]: DEBUG nova.network.neutron [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Updating instance_info_cache with network_info: [{"id": "3907d209-7ee9-44ae-981d-06c45d389c82", "address": "fa:16:3e:c3:52:aa", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3907d209-7e", "ovs_interfaceid": "3907d209-7ee9-44ae-981d-06c45d389c82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.033655] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.172294] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52875ec8-9f12-7f7b-da74-e3187e9053ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011903} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.172606] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.172849] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.173098] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.173256] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.173440] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.173715] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f6677d1-4064-4eb9-9231-38decfe1a8d2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.181711] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.181906] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.182623] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bac3c92c-8dfc-42b7-a732-d5fe09ee07da {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.187328] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 966.187328] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52912715-cf10-984e-df5d-e081aba8f5a1" [ 966.187328] env[62503]: _type = "Task" [ 966.187328] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.194783] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52912715-cf10-984e-df5d-e081aba8f5a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.278357] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.278633] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.386462] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5819332c-7f0c-4b64-a44d-78d7c631b5df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.393895] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128558d2-7e3c-4d89-aee2-4ad090a1c62d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.422441] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f3dc5c-60d9-4675-a78c-20bec3abd710 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.428954] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9009e534-607c-4be7-b94b-8d09a4d81255 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.443590] env[62503]: DEBUG nova.compute.provider_tree [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] 
Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.509599] env[62503]: DEBUG oslo_concurrency.lockutils [req-26059de0-b106-4a31-969b-70b8eda4bbcb req-696fd17f-124f-4c5b-a7b4-41e5837d5cd9 service nova] Releasing lock "refresh_cache-12dff44c-ebb9-4fa3-8396-defcdb474152" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.698050] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52912715-cf10-984e-df5d-e081aba8f5a1, 'name': SearchDatastore_Task, 'duration_secs': 0.008547} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.698777] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ad509d3-fdc6-438e-83fa-bc7aa8129320 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.703767] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 966.703767] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52631df6-5293-e81d-e6fd-4d40218cd8a0" [ 966.703767] env[62503]: _type = "Task" [ 966.703767] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.711055] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52631df6-5293-e81d-e6fd-4d40218cd8a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.781150] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 966.949565] env[62503]: DEBUG nova.scheduler.client.report [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 967.213799] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52631df6-5293-e81d-e6fd-4d40218cd8a0, 'name': SearchDatastore_Task, 'duration_secs': 0.00923} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.214039] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.214335] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 12dff44c-ebb9-4fa3-8396-defcdb474152/12dff44c-ebb9-4fa3-8396-defcdb474152.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.214606] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d592169-f5ac-45a4-ac98-cf7de8637aa0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.220715] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 967.220715] env[62503]: value = "task-1388041" [ 967.220715] env[62503]: _type = "Task" [ 967.220715] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.228105] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388041, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.305550] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.454594] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.455147] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 967.457876] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.043s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.459970] env[62503]: INFO nova.compute.claims [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.730672] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388041, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467094} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.730947] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 12dff44c-ebb9-4fa3-8396-defcdb474152/12dff44c-ebb9-4fa3-8396-defcdb474152.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.731267] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.731444] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e7caa8d-0a24-4b89-be6b-a29753d6bdd0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.737910] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 967.737910] env[62503]: value = "task-1388042" [ 967.737910] env[62503]: _type = "Task" [ 967.737910] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.745105] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388042, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.965306] env[62503]: DEBUG nova.compute.utils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 967.968609] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 967.968769] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.006050] env[62503]: DEBUG nova.policy [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 968.249047] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388042, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060297} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.249535] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.250398] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7c8918-6781-4452-98b8-f0d8bb619209 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.255989] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Successfully created port: 67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.276433] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 12dff44c-ebb9-4fa3-8396-defcdb474152/12dff44c-ebb9-4fa3-8396-defcdb474152.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.277065] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54c3cf55-efac-47f9-9add-63ad74a13194 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.297630] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] 
Waiting for the task: (returnval){ [ 968.297630] env[62503]: value = "task-1388043" [ 968.297630] env[62503]: _type = "Task" [ 968.297630] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.305081] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.469774] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 968.617806] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d562e86f-a668-44f1-9c9d-79f5a8e7e4a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.625791] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d13db49-e00c-44e7-a51f-2204b4d7a94c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.655896] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8df798d-6837-42c3-a6d4-5bc87244bd14 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.663161] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a299c4c-3b37-4dd0-a3c6-222b975807e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.675759] env[62503]: DEBUG nova.compute.provider_tree [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.807039] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388043, 'name': ReconfigVM_Task, 'duration_secs': 0.247132} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.807325] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 12dff44c-ebb9-4fa3-8396-defcdb474152/12dff44c-ebb9-4fa3-8396-defcdb474152.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.808025] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d705832-602f-487f-bbf2-6d26a6e12fe5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.814415] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 968.814415] env[62503]: value = "task-1388044" [ 968.814415] env[62503]: _type = "Task" [ 968.814415] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.822932] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388044, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.178984] env[62503]: DEBUG nova.scheduler.client.report [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 969.324552] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388044, 'name': Rename_Task, 'duration_secs': 0.140726} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.324734] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.325355] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c917342d-0db7-4e71-a426-a64e5a2af9fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.331239] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 969.331239] env[62503]: value = "task-1388045" [ 969.331239] env[62503]: _type = "Task" [ 969.331239] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.338711] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.482173] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 969.508041] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.508320] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.508483] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.508675] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.508825] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.508972] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.509201] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.509387] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.509574] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 969.509742] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 969.509913] env[62503]: DEBUG nova.virt.hardware [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.510760] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757c27f4-eadd-4170-83b3-8300d22c7d46 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.518288] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95e5712-0f8f-4442-8a16-00f52df4c408 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.687237] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.687791] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 969.692235] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.696s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.693672] env[62503]: INFO nova.compute.claims [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.696978] env[62503]: DEBUG nova.compute.manager [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Received event network-vif-plugged-67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 969.697281] env[62503]: DEBUG oslo_concurrency.lockutils [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] Acquiring lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.697520] env[62503]: DEBUG oslo_concurrency.lockutils [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.697736] env[62503]: DEBUG oslo_concurrency.lockutils [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.697969] env[62503]: DEBUG nova.compute.manager [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] No waiting events found dispatching network-vif-plugged-67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 969.698181] env[62503]: WARNING nova.compute.manager [req-9582d8ed-3231-4144-ae58-0da60e13bd33 req-a251d332-a9b8-4a3e-b624-97ade1de8190 service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Received unexpected event network-vif-plugged-67894e0d-c4f4-47d4-bcac-520aa7a3626b for instance with vm_state building and task_state spawning. 
[ 969.783325] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Successfully updated port: 67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.841694] env[62503]: DEBUG oslo_vmware.api [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388045, 'name': PowerOnVM_Task, 'duration_secs': 0.414952} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.841860] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 969.841976] env[62503]: INFO nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Took 7.26 seconds to spawn the instance on the hypervisor. [ 969.842182] env[62503]: DEBUG nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 969.842991] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec273fe5-df50-4f8c-a01e-c4c0fef93344 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.202499] env[62503]: DEBUG nova.compute.utils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.204123] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 970.204298] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.249221] env[62503]: DEBUG nova.policy [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c73da463102f46e59f5fa497a2359e8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93cccd8859f649f4a3444c42a8c188f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 970.286033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.286207] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.286324] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.361236] env[62503]: INFO nova.compute.manager [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Took 13.77 seconds to build instance. [ 970.513770] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Successfully created port: b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.708352] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 970.819653] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.865804] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94094471-0eba-470e-9dc9-1d5828422cf6 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.283s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.882587] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce920218-782c-47a2-9eae-a85380bc2913 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.890753] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0cde5b-f063-419d-846e-1407b8c97313 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.922496] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7e9999-8afd-41f0-bdad-ae9c93f83d03 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.929964] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74b3d4b-9e26-4c94-83f8-2c3f9d14be8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.942897] env[62503]: DEBUG nova.compute.provider_tree [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.993858] env[62503]: DEBUG nova.network.neutron [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Updating instance_info_cache with network_info: [{"id": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "address": "fa:16:3e:d3:aa:b1", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 
30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67894e0d-c4", "ovs_interfaceid": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.176187] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80f79cc-8818-423c-8fb6-b9f19e48b84d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.182513] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Suspending the VM {{(pid=62503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 971.182513] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9f089347-4fe9-4544-9b2d-64a6db80f685 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.187618] env[62503]: DEBUG oslo_vmware.api [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 971.187618] env[62503]: value = "task-1388046" [ 971.187618] env[62503]: _type = "Task" [ 971.187618] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.197050] env[62503]: DEBUG oslo_vmware.api [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388046, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.446733] env[62503]: DEBUG nova.scheduler.client.report [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 971.496349] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.496686] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Instance network_info: |[{"id": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "address": "fa:16:3e:d3:aa:b1", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67894e0d-c4", "ovs_interfaceid": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 971.497154] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:aa:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67894e0d-c4f4-47d4-bcac-520aa7a3626b', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.504839] env[62503]: DEBUG oslo.service.loopingcall [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.505071] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.505334] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4ace170-632e-4c06-9518-7d48e9b33e00 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.525892] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.525892] env[62503]: value = "task-1388047" [ 971.525892] env[62503]: _type = "Task" [ 971.525892] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.534397] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388047, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.697602] env[62503]: DEBUG oslo_vmware.api [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388046, 'name': SuspendVM_Task} progress is 50%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.718185] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 971.721666] env[62503]: DEBUG nova.compute.manager [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Received event network-changed-67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 971.721860] env[62503]: DEBUG nova.compute.manager [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Refreshing instance network info cache due to event network-changed-67894e0d-c4f4-47d4-bcac-520aa7a3626b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 971.722091] env[62503]: DEBUG oslo_concurrency.lockutils [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] Acquiring lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.722252] env[62503]: DEBUG oslo_concurrency.lockutils [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] Acquired lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.722471] env[62503]: DEBUG nova.network.neutron [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Refreshing network info cache for port 67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.751229] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.751579] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.751759] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.751955] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.752136] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.752323] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 
tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.752568] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.752739] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.752916] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.753108] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.753296] env[62503]: DEBUG nova.virt.hardware [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.754711] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fe559b-6ff2-4082-a548-b2b982c4b901 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.763831] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485b79f7-4d88-4d61-87c6-46f2790c3aa8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.952764] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.953349] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 971.956710] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.923s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.962473] env[62503]: INFO nova.compute.claims [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.038863] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388047, 'name': CreateVM_Task, 'duration_secs': 0.424031} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.039109] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.039882] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.040145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.040557] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.040821] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52927c48-5d3f-4d6a-8ef9-099cfeccc723 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.045773] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 972.045773] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eec9a1-81aa-7da4-2a37-b9c58a998227" [ 972.045773] env[62503]: _type = "Task" [ 972.045773] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.054092] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eec9a1-81aa-7da4-2a37-b9c58a998227, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.199029] env[62503]: DEBUG oslo_vmware.api [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388046, 'name': SuspendVM_Task, 'duration_secs': 0.799586} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.199650] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Suspended the VM {{(pid=62503) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 972.200020] env[62503]: DEBUG nova.compute.manager [None req-4463d93d-15a0-436b-a7f0-308ffa1fba90 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 972.200901] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7ad5f6-99f9-40e8-9537-7d5e29ac6236 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.240430] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Successfully updated port: b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.462774] env[62503]: DEBUG nova.network.neutron [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Updated VIF entry in instance network info cache for port 67894e0d-c4f4-47d4-bcac-520aa7a3626b. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.463160] env[62503]: DEBUG nova.network.neutron [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Updating instance_info_cache with network_info: [{"id": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "address": "fa:16:3e:d3:aa:b1", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67894e0d-c4", "ovs_interfaceid": "67894e0d-c4f4-47d4-bcac-520aa7a3626b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.470285] env[62503]: DEBUG nova.compute.utils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 972.473659] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 972.473843] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.558092] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eec9a1-81aa-7da4-2a37-b9c58a998227, 'name': SearchDatastore_Task, 'duration_secs': 0.012207} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.561088] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.561088] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.561088] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.561088] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.561088] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.561088] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-981b9e4d-867c-4a07-aa5a-a11ce70979a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.568227] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.568627] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.569462] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ed99d9-266a-4dc6-9734-e9820e3a4f1b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.574969] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 972.574969] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cc43d2-7f2c-8d42-093a-7bb3efe76f7c" [ 972.574969] env[62503]: _type = "Task" [ 972.574969] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.583137] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cc43d2-7f2c-8d42-093a-7bb3efe76f7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.683704] env[62503]: DEBUG nova.policy [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '111ad3ca820d4ab0a3ac466a24d9526d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9b69dfe9a9a44188c612fd777341101', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 972.744523] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.744523] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquired lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.744523] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.920103] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Successfully created port: 6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 972.965870] env[62503]: DEBUG oslo_concurrency.lockutils [req-0d61adfc-21a4-40fa-aec2-7f68c9cf82bd req-13991b8f-53b0-4865-9adf-7a6ffa86316d service nova] Releasing lock "refresh_cache-4d7f62b3-42d0-4f98-bac4-541f116c9709" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.980299] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 973.089323] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52cc43d2-7f2c-8d42-093a-7bb3efe76f7c, 'name': SearchDatastore_Task, 'duration_secs': 0.009} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.090178] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1922e44-ad1f-4a27-a792-9bfb2a24f495 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.096227] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 973.096227] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52428472-4278-9dc7-8c7b-f127d18d7e34" [ 973.096227] env[62503]: _type = "Task" [ 973.096227] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.104322] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52428472-4278-9dc7-8c7b-f127d18d7e34, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.172209] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e130eb04-dc49-4037-b5ef-285a65daee59 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.179570] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec361a2-62df-46de-9d2a-680c9e75417d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.215599] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6006f27b-79da-4ab6-81fe-a1ab092c69e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.224249] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ea0d54-df41-4121-9718-4ea6f3dfaf41 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.238663] env[62503]: DEBUG nova.compute.provider_tree [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.287505] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.419020] env[62503]: DEBUG nova.network.neutron [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updating instance_info_cache with network_info: [{"id": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "address": "fa:16:3e:ea:52:fa", "network": {"id": "dd3019b1-d7ac-4167-9afc-c4accedb7cf9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1017738732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93cccd8859f649f4a3444c42a8c188f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c7fd20-28", "ovs_interfaceid": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.606149] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52428472-4278-9dc7-8c7b-f127d18d7e34, 'name': SearchDatastore_Task, 'duration_secs': 0.008668} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.606478] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.606775] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 4d7f62b3-42d0-4f98-bac4-541f116c9709/4d7f62b3-42d0-4f98-bac4-541f116c9709.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.607081] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90215919-2287-48b9-8b9b-f969ee05f4eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.614837] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 973.614837] env[62503]: value = "task-1388048" [ 973.614837] env[62503]: _type = "Task" [ 973.614837] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.621353] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388048, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.715591] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "12dff44c-ebb9-4fa3-8396-defcdb474152" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.715899] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.716133] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.716326] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.716515] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.718885] env[62503]: INFO nova.compute.manager [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Terminating instance [ 973.721654] env[62503]: DEBUG nova.compute.manager [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Start destroying the instance on the hypervisor.
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 973.721654] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.722288] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949fde12-c1d7-4262-8c32-ba6c95ede7a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.729808] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.730068] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f023958b-80ef-42b1-a6e8-c5b26e5d5ad1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.742116] env[62503]: DEBUG nova.scheduler.client.report [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 973.747057] env[62503]: DEBUG nova.compute.manager [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Received event network-vif-plugged-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 973.747260] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Acquiring lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.747512] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.747657] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62503) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.747841] env[62503]: DEBUG nova.compute.manager [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] No waiting events found dispatching network-vif-plugged-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.748041] env[62503]: WARNING nova.compute.manager [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Received unexpected event network-vif-plugged-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a for instance with vm_state building and task_state spawning. [ 973.748236] env[62503]: DEBUG nova.compute.manager [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Received event network-changed-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 973.748406] env[62503]: DEBUG nova.compute.manager [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Refreshing instance network info cache due to event network-changed-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 973.748579] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Acquiring lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.806940] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.807165] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.807407] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleting the datastore file [datastore1] 12dff44c-ebb9-4fa3-8396-defcdb474152 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.807706] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2bb2a5c-8012-43ea-ac91-3fe0e8855458 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.815242] env[62503]: DEBUG oslo_vmware.api [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 973.815242] env[62503]: value = "task-1388050" [ 973.815242] env[62503]: _type 
= "Task" [ 973.815242] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.823273] env[62503]: DEBUG oslo_vmware.api [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.921706] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Releasing lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.922123] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Instance network_info: |[{"id": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "address": "fa:16:3e:ea:52:fa", "network": {"id": "dd3019b1-d7ac-4167-9afc-c4accedb7cf9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1017738732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93cccd8859f649f4a3444c42a8c188f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c7fd20-28", "ovs_interfaceid": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 973.922441] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Acquired lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.922693] env[62503]: DEBUG nova.network.neutron [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Refreshing network info cache for port b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.924067] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:52:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.932117] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Creating folder: Project (93cccd8859f649f4a3444c42a8c188f5). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.935149] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12a633e9-e55a-4235-ad56-32e000eb3fcb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.946174] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Created folder: Project (93cccd8859f649f4a3444c42a8c188f5) in parent group-v294540. [ 973.946403] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Creating folder: Instances. Parent ref: group-v294627. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.946666] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c6b0ef1-836e-4e48-892b-84a853acead8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.958021] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Created folder: Instances in parent group-v294627. [ 973.958360] env[62503]: DEBUG oslo.service.loopingcall [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.958535] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.958763] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d53b191-6ff4-4e4a-b39b-d230865b7d9f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.991136] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 973.997528] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.997528] env[62503]: value = "task-1388053" [ 973.997528] env[62503]: _type = "Task" [ 973.997528] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.014826] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388053, 'name': CreateVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.029601] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 
tempest-MultipleCreateTestJSON-300413839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 974.032558] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 974.032558] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fac1d57-5c76-4923-99bf-07e535559d63 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.043255] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332501b8-754f-469f-8c6b-1db24fdbe005 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.123470] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449795} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.126070] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 4d7f62b3-42d0-4f98-bac4-541f116c9709/4d7f62b3-42d0-4f98-bac4-541f116c9709.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.126305] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.126569] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-636c7fb7-f1b6-4a92-9146-d7a86805489a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.132957] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 974.132957] env[62503]: value = "task-1388054" [ 974.132957] env[62503]: _type = "Task" [ 974.132957] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.140531] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.196293] env[62503]: DEBUG nova.network.neutron [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updated VIF entry in instance network info cache for port b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.196711] env[62503]: DEBUG nova.network.neutron [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updating instance_info_cache with network_info: [{"id": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "address": "fa:16:3e:ea:52:fa", "network": {"id": "dd3019b1-d7ac-4167-9afc-c4accedb7cf9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1017738732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93cccd8859f649f4a3444c42a8c188f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c7fd20-28", "ovs_interfaceid": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.250585] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.251155] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 974.253853] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.948s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.255333] env[62503]: INFO nova.compute.claims [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.326225] env[62503]: DEBUG oslo_vmware.api [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297384} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.326554] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.326789] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.327018] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.327243] env[62503]: INFO nova.compute.manager [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Took 0.61 seconds to destroy the instance on the hypervisor. [ 974.327532] env[62503]: DEBUG oslo.service.loopingcall [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.327766] env[62503]: DEBUG nova.compute.manager [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 974.327894] env[62503]: DEBUG nova.network.neutron [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.335288] env[62503]: DEBUG nova.compute.manager [req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Received event network-vif-plugged-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 974.335288] env[62503]: DEBUG oslo_concurrency.lockutils [req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] Acquiring lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.335288] env[62503]: DEBUG oslo_concurrency.lockutils [req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.335288] env[62503]: DEBUG oslo_concurrency.lockutils
[req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.335288] env[62503]: DEBUG nova.compute.manager [req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] No waiting events found dispatching network-vif-plugged-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 974.335288] env[62503]: WARNING nova.compute.manager [req-6dabd3b5-ff4f-40f5-a037-06aa764fdc70 req-a0e794e1-2a81-4199-8ebe-a9f5113fd2bb service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Received unexpected event network-vif-plugged-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 for instance with vm_state building and task_state spawning. [ 974.507304] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388053, 'name': CreateVM_Task, 'duration_secs': 0.380154} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.507481] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.508225] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.508399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.508712] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.508961] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-910fe274-7780-4e4e-97da-2dadeafb9bd4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.513608] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 974.513608] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247151e-4541-8c83-a15b-76c1c0d4c9c6" [ 974.513608] env[62503]: _type = "Task" [ 974.513608] env[62503]: } to complete.
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.520666] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247151e-4541-8c83-a15b-76c1c0d4c9c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.644168] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064819} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.644459] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.645168] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c700c187-7305-4636-b064-a773563b236f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.667030] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 4d7f62b3-42d0-4f98-bac4-541f116c9709/4d7f62b3-42d0-4f98-bac4-541f116c9709.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.667177] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-384ccf0c-d81a-43e9-89bf-43c349077560 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.685443] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 974.685443] env[62503]: value = "task-1388055" [ 974.685443] env[62503]: _type = "Task" [ 974.685443] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.692784] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388055, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.699405] env[62503]: DEBUG oslo_concurrency.lockutils [req-1e8373d8-745d-428c-9a22-8bd620525e34 req-09c8b09d-7fc3-4093-9680-e701105bc7c8 service nova] Releasing lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.760063] env[62503]: DEBUG nova.compute.utils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 974.763196] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 974.763361] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.806189] env[62503]: DEBUG nova.policy [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '111ad3ca820d4ab0a3ac466a24d9526d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9b69dfe9a9a44188c612fd777341101', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 974.893307] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Successfully updated port: 6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.928301] env[62503]: DEBUG nova.compute.manager [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Received event network-changed-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 974.928645] env[62503]: DEBUG nova.compute.manager [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Refreshing instance network info cache due to event network-changed-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 974.928919] env[62503]: DEBUG oslo_concurrency.lockutils [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] Acquiring lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.929133] env[62503]: DEBUG oslo_concurrency.lockutils [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] Acquired lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.929370] env[62503]: DEBUG nova.network.neutron [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Refreshing network info cache for port 6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.024590] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5247151e-4541-8c83-a15b-76c1c0d4c9c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009499} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.024900] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.025167] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.025437] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.025593] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.025774] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.026066] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82215a8f-61ad-4594-bb71-507c3a90a272 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.034313] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.034509] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.035240] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33592f88-6602-4726-abe1-0872f377b390 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.040858] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 975.040858] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290715f-aba1-498d-3bb5-8ef2060120b0" [ 975.040858] env[62503]: _type = "Task" [ 975.040858] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.048438] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290715f-aba1-498d-3bb5-8ef2060120b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.066881] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Successfully created port: dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.103438] env[62503]: DEBUG nova.network.neutron [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.195290] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388055, 'name': ReconfigVM_Task, 'duration_secs': 0.273555} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.195658] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 4d7f62b3-42d0-4f98-bac4-541f116c9709/4d7f62b3-42d0-4f98-bac4-541f116c9709.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.196318] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5f24d48-b722-412b-b9f6-82f1c260952a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.202098] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 975.202098] env[62503]: value = "task-1388056" [ 975.202098] env[62503]: _type = "Task" [ 975.202098] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.209847] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388056, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.266216] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 975.394065] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397f041d-9d30-4b14-9b89-d567a9f7383b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.396763] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.403187] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0ae8d4-f9db-49d2-bf0d-72d1f6972401 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.435630] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1d483b-4f88-4991-aa74-56e4a28ccd8f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.443065] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e0a690-c59e-41b0-9fc8-81a50738f8be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.456340] env[62503]: DEBUG nova.compute.provider_tree [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.464794] env[62503]: DEBUG nova.network.neutron [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.533178] env[62503]: DEBUG nova.network.neutron [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.551026] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290715f-aba1-498d-3bb5-8ef2060120b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009281} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.551808] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f0a992-dfe6-4d98-8612-ee96fb186ab2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.556762] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 975.556762] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52662e73-a676-9355-d352-8233884a1810" [ 975.556762] env[62503]: _type = "Task" [ 975.556762] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.563921] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52662e73-a676-9355-d352-8233884a1810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.606107] env[62503]: INFO nova.compute.manager [-] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Took 1.28 seconds to deallocate network for instance. [ 975.710820] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388056, 'name': Rename_Task, 'duration_secs': 0.13064} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.711168] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.711416] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddfd9210-4ee1-49ea-bbfc-ad447946b60b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.717314] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 975.717314] env[62503]: value = "task-1388057" [ 975.717314] env[62503]: _type = "Task" [ 975.717314] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.725166] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388057, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.771544] env[62503]: DEBUG nova.compute.manager [req-d260791f-59f7-46f4-8dea-446e9635be50 req-a0bb6ef8-c353-467b-8732-1a35bd832dbd service nova] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Received event network-vif-deleted-3907d209-7ee9-44ae-981d-06c45d389c82 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 975.959296] env[62503]: DEBUG nova.scheduler.client.report [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 976.036144] env[62503]: DEBUG oslo_concurrency.lockutils [req-65fd8df6-3628-4f85-8f71-268afbb098e0 req-d73ee53a-8c53-435b-867a-4679e2787a12 service nova] Releasing lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.036601] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.036708] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.070049] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52662e73-a676-9355-d352-8233884a1810, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.070049] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.070049] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] bba6c92b-cac3-4677-a8f4-57a2704fc685/bba6c92b-cac3-4677-a8f4-57a2704fc685.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.070297] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b20f296-2c41-4b72-93c8-2c64d39f7329 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.076563] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 976.076563] env[62503]: value = "task-1388058" [ 976.076563] env[62503]: _type = "Task" [ 976.076563] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.084621] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.111953] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.228893] env[62503]: DEBUG oslo_vmware.api [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388057, 'name': PowerOnVM_Task, 'duration_secs': 0.488548} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.229289] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.229602] env[62503]: INFO nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Took 6.75 seconds to spawn the instance on the hypervisor. [ 976.229885] env[62503]: DEBUG nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 976.230868] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5796a172-40f0-4d0f-adf0-09a33b0d1e4d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.278773] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 976.306391] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 976.306665] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 976.306827] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.307023] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 
tempest-MultipleCreateTestJSON-300413839-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 976.307180] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.307332] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 976.307547] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 976.307714] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 976.307891] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 976.308071] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 976.308254] env[62503]: DEBUG nova.virt.hardware [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.309145] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f187da-8a33-4feb-b9d3-ce8a706da728 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.318047] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf805d84-6fde-4225-a5c9-070d610bb68b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.464385] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.464939] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 976.467852] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.356s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.468154] env[62503]: DEBUG nova.objects.instance [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'resources' on Instance uuid 12dff44c-ebb9-4fa3-8396-defcdb474152 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.572274] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.575660] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Successfully updated port: dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 976.585961] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388058, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479344} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.586855] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] bba6c92b-cac3-4677-a8f4-57a2704fc685/bba6c92b-cac3-4677-a8f4-57a2704fc685.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.587119] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.587361] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-954b50b3-dcd7-463c-8cc7-f734d6c96a4c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.595350] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 976.595350] env[62503]: value = "task-1388059" [ 976.595350] env[62503]: _type = "Task" [ 976.595350] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.603650] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388059, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.700249] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Updating instance_info_cache with network_info: [{"id": "6d46d504-d2d1-40ff-ac6f-6355cc56d1d4", "address": "fa:16:3e:c5:b9:1a", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d46d504-d2", "ovs_interfaceid": "6d46d504-d2d1-40ff-ac6f-6355cc56d1d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.747054] env[62503]: INFO nova.compute.manager [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Took 11.51 seconds to build instance. [ 976.974919] env[62503]: DEBUG nova.compute.utils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 976.976921] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 976.977170] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 977.025610] env[62503]: DEBUG nova.policy [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b076e358f78e4874876f90d96fd612e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e818e5ee9dc24efa96747c9558514a15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 977.078093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.078093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.078352] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.107926] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059119} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.108221] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.109007] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0978efdc-7e45-4147-a966-f8656ce2aa6f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.114430] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee6646b-a7bb-4a65-8f0e-6b50ea68a191 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.135657] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] bba6c92b-cac3-4677-a8f4-57a2704fc685/bba6c92b-cac3-4677-a8f4-57a2704fc685.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.136737] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f416f7ef-54d5-44fc-aef4-28ac525d4ee0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.157087] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ab7c08-f767-4bec-99db-4ee98b2591cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.161590] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 977.161590] env[62503]: value = "task-1388060" [ 977.161590] env[62503]: _type = "Task" [ 977.161590] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.191248] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c8e897-d1fc-4737-a8e1-027fc566f21e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.198026] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.202068] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79364f7e-2ec1-4427-a9d8-420244aaab75 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.206120] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "refresh_cache-a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.206424] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance network_info: |[{"id": "6d46d504-d2d1-40ff-ac6f-6355cc56d1d4", "address": "fa:16:3e:c5:b9:1a", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d46d504-d2", "ovs_interfaceid": "6d46d504-d2d1-40ff-ac6f-6355cc56d1d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 977.206824] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:b9:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d46d504-d2d1-40ff-ac6f-6355cc56d1d4', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.214057] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating folder: Project (c9b69dfe9a9a44188c612fd777341101). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 977.214697] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8d3c98d-f2de-48b9-a1a7-acfff14887d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.226411] env[62503]: DEBUG nova.compute.provider_tree [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.236018] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created folder: Project (c9b69dfe9a9a44188c612fd777341101) in parent group-v294540. [ 977.236211] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating folder: Instances. Parent ref: group-v294630. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 977.236443] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b44ab645-94a6-4328-9101-ef8197ee6f1a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.245906] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created folder: Instances in parent group-v294630. [ 977.246161] env[62503]: DEBUG oslo.service.loopingcall [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.246352] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.246544] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9ea88af-f1fc-428b-bf0b-a31b7cb2890f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.260576] env[62503]: DEBUG oslo_concurrency.lockutils [None req-63bb133d-fd32-437e-9e45-f0ca22d8b321 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.032s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.265324] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.265324] env[62503]: value = "task-1388063" [ 977.265324] env[62503]: _type = "Task" [ 977.265324] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.272471] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388063, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.352376] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Successfully created port: a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.480998] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 977.545852] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.546133] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.546375] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.546536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.546710] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.548707] env[62503]: INFO nova.compute.manager [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 
4d7f62b3-42d0-4f98-bac4-541f116c9709] Terminating instance [ 977.550519] env[62503]: DEBUG nova.compute.manager [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 977.550716] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.551661] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7686912b-d76d-4fe2-8f5a-87f3d6464d16 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.559154] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.559288] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fac221d8-4758-441f-ad90-aa7937ecbe99 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.565026] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 977.565026] env[62503]: value = "task-1388064" [ 977.565026] env[62503]: _type = "Task" [ 977.565026] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.572734] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.627846] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.674672] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.729671] env[62503]: DEBUG nova.scheduler.client.report [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 977.776100] env[62503]: DEBUG nova.network.neutron [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Updating instance_info_cache with network_info: [{"id": "dac5db33-8213-452f-8974-e4fcfb5e3013", "address": "fa:16:3e:2d:e7:de", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac5db33-82", "ovs_interfaceid": "dac5db33-8213-452f-8974-e4fcfb5e3013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.777550] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388063, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.799491] env[62503]: DEBUG nova.compute.manager [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Received event network-vif-plugged-dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 977.799491] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Acquiring lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.799654] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.799734] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.799895] env[62503]: DEBUG nova.compute.manager [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] No waiting events found dispatching network-vif-plugged-dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 977.800131] env[62503]: WARNING nova.compute.manager [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Received unexpected event network-vif-plugged-dac5db33-8213-452f-8974-e4fcfb5e3013 for instance with vm_state building and task_state spawning. [ 977.800272] env[62503]: DEBUG nova.compute.manager [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Received event network-changed-dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 977.800452] env[62503]: DEBUG nova.compute.manager [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Refreshing instance network info cache due to event network-changed-dac5db33-8213-452f-8974-e4fcfb5e3013. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 977.800629] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Acquiring lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.074396] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388064, 'name': PowerOffVM_Task, 'duration_secs': 0.190759} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.074673] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 978.074848] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 978.075112] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd6d89b6-2577-4410-9d37-85e5803e427d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.146459] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.146757] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.147010] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore2] 4d7f62b3-42d0-4f98-bac4-541f116c9709 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.147502] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36eab850-f1fb-4257-8ada-78092c2e3914 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.153616] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 978.153616] env[62503]: value = "task-1388066" [ 978.153616] env[62503]: _type = "Task" [ 978.153616] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.161948] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.170722] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388060, 'name': ReconfigVM_Task, 'duration_secs': 0.626843} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.170976] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Reconfigured VM instance instance-0000005c to attach disk [datastore2] bba6c92b-cac3-4677-a8f4-57a2704fc685/bba6c92b-cac3-4677-a8f4-57a2704fc685.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.171597] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddb5c4b2-bb20-4690-a271-f29960e204cd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.177373] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 978.177373] env[62503]: value = "task-1388067" [ 978.177373] env[62503]: _type = "Task" [ 978.177373] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.186360] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388067, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.235372] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.261515] env[62503]: INFO nova.scheduler.client.report [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance 12dff44c-ebb9-4fa3-8396-defcdb474152 [ 978.276783] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388063, 'name': CreateVM_Task, 'duration_secs': 0.513701} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.276946] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.277603] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.277768] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.278090] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 978.278528] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.278785] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Instance network_info: |[{"id": "dac5db33-8213-452f-8974-e4fcfb5e3013", "address": "fa:16:3e:2d:e7:de", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac5db33-82", "ovs_interfaceid": "dac5db33-8213-452f-8974-e4fcfb5e3013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 978.279658] env[62503]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed974108-6b5c-4aae-a3e6-d2b33b0d88d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.281061] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Acquired lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.281251] env[62503]: DEBUG nova.network.neutron [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Refreshing network info cache for port dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.282433] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:e7:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dac5db33-8213-452f-8974-e4fcfb5e3013', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.290990] env[62503]: DEBUG oslo.service.loopingcall [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.291455] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.292124] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-221eaaf7-1a52-4343-8473-e0af8ee907de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.310773] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.310773] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522cfa62-8d01-cc74-4e4e-b2a7c81af3fb" [ 978.310773] env[62503]: _type = "Task" [ 978.310773] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.315548] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.315548] env[62503]: value = "task-1388068" [ 978.315548] env[62503]: _type = "Task" [ 978.315548] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.322339] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522cfa62-8d01-cc74-4e4e-b2a7c81af3fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011412} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.322920] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.323182] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.323416] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.323569] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.323746] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.323979] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2be1519-8a9f-490b-8c37-eb43d2a53fe5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.328293] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388068, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.334024] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.334208] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.334855] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6abf9c36-3f29-44a5-8689-83e3b26c7e5c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.339920] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.339920] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523b9bba-f820-b229-9255-f6623fe2f824" [ 978.339920] env[62503]: _type = "Task" [ 978.339920] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.346895] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523b9bba-f820-b229-9255-f6623fe2f824, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.492991] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 978.522118] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.522423] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.522621] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.522841] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.523012] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.523184] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.523415] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.523600] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.525026] env[62503]: DEBUG nova.virt.hardware [None 
req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.525026] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.525026] env[62503]: DEBUG nova.virt.hardware [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.525026] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee81baa-6e25-44f9-8c10-ff72f51ca485 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.533065] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593a512c-79a9-4dfc-9a72-aedd5850d314 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.664212] env[62503]: DEBUG oslo_vmware.api [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148216} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.664512] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.664758] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.665228] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.665228] env[62503]: INFO nova.compute.manager [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Took 1.11 seconds to destroy the instance on the hypervisor. [ 978.665433] env[62503]: DEBUG oslo.service.loopingcall [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.665568] env[62503]: DEBUG nova.compute.manager [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 978.665651] env[62503]: DEBUG nova.network.neutron [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 978.688431] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388067, 'name': Rename_Task, 'duration_secs': 0.155548} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.688763] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.689050] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58412a7d-3a60-45d6-8fd5-cb2d66d92f40 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.696641] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 978.696641] env[62503]: value = "task-1388069" [ 978.696641] env[62503]: _type = "Task" [ 978.696641] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.707264] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388069, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.775435] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a8dd5bc6-9259-46ab-bd5b-40003e4e3b6a tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "12dff44c-ebb9-4fa3-8396-defcdb474152" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.059s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.826212] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388068, 'name': CreateVM_Task, 'duration_secs': 0.291653} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.826536] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.827070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.827249] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.827558] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 978.827813] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c6ac85-4589-4936-93c6-2bd8a237a512 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.832663] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.832663] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ade2fd-4a8e-7ac3-f503-af79b6d340ca" [ 978.832663] env[62503]: _type = "Task" [ 978.832663] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.848448] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ade2fd-4a8e-7ac3-f503-af79b6d340ca, 'name': SearchDatastore_Task, 'duration_secs': 0.009789} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.849072] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.849314] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.849574] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.849732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.849916] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.850185] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa5a6971-8361-4dd7-8db5-408b6a8b4929 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.855652] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523b9bba-f820-b229-9255-f6623fe2f824, 'name': SearchDatastore_Task, 'duration_secs': 0.008162} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.856163] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b274b256-316c-4230-a703-7c988b959b62 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.860995] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.860995] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525e768b-8351-1e1b-a7ae-67f425b8a2c1" [ 978.860995] env[62503]: _type = "Task" [ 978.860995] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.862430] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.862612] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.866482] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e65d25-31eb-42e7-9b36-c5e864ae6622 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.870295] env[62503]: DEBUG nova.compute.manager [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec req-d228309a-df61-4965-a55f-281be69d0fbd service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Received event network-vif-plugged-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 978.870537] env[62503]: DEBUG oslo_concurrency.lockutils [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec req-d228309a-df61-4965-a55f-281be69d0fbd service nova] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.870757] env[62503]: DEBUG oslo_concurrency.lockutils [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec req-d228309a-df61-4965-a55f-281be69d0fbd service nova] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.870932] env[62503]: DEBUG oslo_concurrency.lockutils [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec req-d228309a-df61-4965-a55f-281be69d0fbd service nova] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.871118] env[62503]: DEBUG nova.compute.manager [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec 
req-d228309a-df61-4965-a55f-281be69d0fbd service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] No waiting events found dispatching network-vif-plugged-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.871288] env[62503]: WARNING nova.compute.manager [req-622dd332-205f-48cb-a40e-27ccdf1ec7ec req-d228309a-df61-4965-a55f-281be69d0fbd service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Received unexpected event network-vif-plugged-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f for instance with vm_state building and task_state spawning. [ 978.877049] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525e768b-8351-1e1b-a7ae-67f425b8a2c1, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.878238] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.878498] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d/a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.878804] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.878804] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52bd70c9-61ba-f37d-dbc4-3d3916631678" [ 978.878804] env[62503]: _type = "Task" [ 978.878804] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.878989] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1702806-6f8d-46a2-a2a4-39de9ac4fde8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.889286] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52bd70c9-61ba-f37d-dbc4-3d3916631678, 'name': SearchDatastore_Task, 'duration_secs': 0.008258} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.891281] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.891281] env[62503]: value = "task-1388070" [ 978.891281] env[62503]: _type = "Task" [ 978.891281] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.893517] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fa10acb-b0e0-4b7b-9e21-1f4dc57c8621 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.901365] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.901365] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522e07de-8856-4453-06e8-a773b928003f" [ 978.901365] env[62503]: _type = "Task" [ 978.901365] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.904295] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.912356] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522e07de-8856-4453-06e8-a773b928003f, 'name': SearchDatastore_Task, 'duration_secs': 0.008133} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.912480] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.912714] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] b120b29f-0e26-465f-bc6f-4214525ae2de/b120b29f-0e26-465f-bc6f-4214525ae2de.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.912957] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02fb3bc1-6b2b-459e-a3c1-57378087a77d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.918315] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 978.918315] env[62503]: value = "task-1388071" [ 978.918315] env[62503]: _type = "Task" [ 978.918315] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.927924] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.047860] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Successfully updated port: a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.067702] env[62503]: DEBUG nova.network.neutron [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Updated VIF entry in instance network info cache for port dac5db33-8213-452f-8974-e4fcfb5e3013. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.068102] env[62503]: DEBUG nova.network.neutron [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Updating instance_info_cache with network_info: [{"id": "dac5db33-8213-452f-8974-e4fcfb5e3013", "address": "fa:16:3e:2d:e7:de", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac5db33-82", "ovs_interfaceid": "dac5db33-8213-452f-8974-e4fcfb5e3013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.207828] env[62503]: DEBUG oslo_vmware.api [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388069, 'name': PowerOnVM_Task, 'duration_secs': 0.467222} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.208300] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.208424] env[62503]: INFO nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Took 7.49 seconds to spawn the instance on the hypervisor. 
[ 979.208548] env[62503]: DEBUG nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 979.209469] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123ea5b2-b635-407d-84ae-edcbc58a2ed8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.406711] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388070, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.429624] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388071, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.455160] env[62503]: DEBUG nova.network.neutron [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.553937] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.554121] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.554283] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.570777] env[62503]: DEBUG oslo_concurrency.lockutils [req-050ac06a-750c-4a94-b243-48f8ac52f8b3 req-8c622bf5-da92-43ce-9e87-fd3df27083a0 service nova] Releasing lock "refresh_cache-b120b29f-0e26-465f-bc6f-4214525ae2de" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.730696] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.730932] env[62503]: 
DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.732274] env[62503]: INFO nova.compute.manager [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Took 14.34 seconds to build instance. [ 979.824429] env[62503]: DEBUG nova.compute.manager [req-c8c1ac54-830a-481f-baf0-265386122097 req-c11f3454-3a1f-4fde-9c83-3fd140b69d43 service nova] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Received event network-vif-deleted-67894e0d-c4f4-47d4-bcac-520aa7a3626b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 979.906679] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388070, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561233} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.907058] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d/a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.907291] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.907542] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28d29a80-db65-43c4-9bc5-73842e1ad443 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.913997] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 979.913997] env[62503]: value = "task-1388072" [ 979.913997] env[62503]: _type = "Task" [ 979.913997] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.922085] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388072, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.929284] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610277} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.929532] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] b120b29f-0e26-465f-bc6f-4214525ae2de/b120b29f-0e26-465f-bc6f-4214525ae2de.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.929757] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.929989] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07380fc2-44dc-42ed-b99a-e83d8ec6341f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.936078] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 979.936078] env[62503]: value = "task-1388073" [ 979.936078] env[62503]: _type = "Task" [ 979.936078] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.943530] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.957547] env[62503]: INFO nova.compute.manager [-] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Took 1.29 seconds to deallocate network for instance. [ 980.085812] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 980.221696] env[62503]: DEBUG nova.network.neutron [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating instance_info_cache with network_info: [{"id": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "address": "fa:16:3e:25:71:4d", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06bfb1f-0b", "ovs_interfaceid": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.233732] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 980.236680] env[62503]: DEBUG oslo_concurrency.lockutils [None req-fd23b35c-41c1-495e-b29d-5f7df4b94656 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.857s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.423721] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278079} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.423971] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.424744] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0babcc2-6e26-4f0f-9fc2-5c5a2cf0c3af {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.446120] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d/a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.449363] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47a70083-d9df-4c97-93ee-7e2fd7852dc2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.463974] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.463974] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.464164] env[62503]: DEBUG nova.objects.instance [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid 4d7f62b3-42d0-4f98-bac4-541f116c9709 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.472335] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065296} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.474033] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.474349] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 980.474349] env[62503]: value = "task-1388074" [ 980.474349] env[62503]: _type = "Task" [ 980.474349] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.475256] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43920755-e7d2-4ba0-97fa-d137bf387f23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.502721] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] b120b29f-0e26-465f-bc6f-4214525ae2de/b120b29f-0e26-465f-bc6f-4214525ae2de.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.503050] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388074, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.503361] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fe82f3d-1974-46fa-8fba-b79b5e14fc72 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.522981] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 980.522981] env[62503]: value = "task-1388075" [ 980.522981] env[62503]: _type = "Task" [ 980.522981] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.530545] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388075, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.724690] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.725078] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance network_info: |[{"id": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "address": "fa:16:3e:25:71:4d", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06bfb1f-0b", "ovs_interfaceid": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 980.725566] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:71:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a06bfb1f-0b51-4150-8e23-cdfe68e9c27f', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.733320] env[62503]: DEBUG oslo.service.loopingcall [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.733584] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.733859] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e65b85b-f083-4290-91c0-99761b42d1c3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.758449] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.758449] env[62503]: value = "task-1388076" [ 980.758449] env[62503]: _type = "Task" [ 980.758449] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.767151] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388076, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.773736] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.903898] env[62503]: DEBUG nova.compute.manager [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Received event network-changed-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 980.904201] env[62503]: DEBUG nova.compute.manager [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Refreshing instance network info cache due to event network-changed-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 980.904579] env[62503]: DEBUG oslo_concurrency.lockutils [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] Acquiring lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.904801] env[62503]: DEBUG oslo_concurrency.lockutils [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] Acquired lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.904995] env[62503]: DEBUG nova.network.neutron [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Refreshing network info cache for port a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.988412] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388074, 'name': ReconfigVM_Task, 'duration_secs': 0.453942} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.988710] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Reconfigured VM instance instance-0000005d to attach disk [datastore2] a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d/a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.991988] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66c0dcb8-c4d8-4073-88c5-bead70f97e01 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.001445] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 981.001445] env[62503]: value = "task-1388077" [ 981.001445] env[62503]: _type = "Task" [ 981.001445] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.012357] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388077, 'name': Rename_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.035852] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388075, 'name': ReconfigVM_Task, 'duration_secs': 0.429171} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.036777] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Reconfigured VM instance instance-0000005e to attach disk [datastore1] b120b29f-0e26-465f-bc6f-4214525ae2de/b120b29f-0e26-465f-bc6f-4214525ae2de.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.037610] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3fee7127-b48f-4679-8934-8e92001e9455 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.046171] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 981.046171] env[62503]: value = "task-1388078" [ 981.046171] env[62503]: _type = "Task" [ 981.046171] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.055886] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388078, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.119489] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13586503-d3ed-4bef-9a4c-4a459484e2d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.128771] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f595ade8-08b4-494b-bd9f-d069db80ae35 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.160927] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09da86d7-1a2e-4924-aa0e-92fe153e83e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.168517] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ff9faf-28ea-49d0-b225-c13a6a773809 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.182086] env[62503]: DEBUG nova.compute.provider_tree [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.267946] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388076, 'name': CreateVM_Task, 'duration_secs': 0.329434} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.268523] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.269605] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.269848] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.270205] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.270473] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f6afc4a-3325-48fe-9a4f-50dad0eb6979 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.275389] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 981.275389] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a9257b-9ec6-7de1-1e73-a0702e58ed14" [ 981.275389] env[62503]: _type = "Task" [ 981.275389] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.283238] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a9257b-9ec6-7de1-1e73-a0702e58ed14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.512741] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388077, 'name': Rename_Task, 'duration_secs': 0.127566} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.513016] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.513674] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a5fab12-2ff5-4cb7-836d-bd9de2d232a2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.519568] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 981.519568] env[62503]: value = "task-1388079" [ 981.519568] env[62503]: _type = "Task" [ 981.519568] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.526989] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.555354] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388078, 'name': Rename_Task, 'duration_secs': 0.14226} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.555659] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.555941] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ddc98a4-9bb8-431e-ae1b-f8c33e0fce04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.561879] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 981.561879] env[62503]: value = "task-1388080" [ 981.561879] env[62503]: _type = "Task" [ 981.561879] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.576280] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388080, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.631208] env[62503]: DEBUG nova.network.neutron [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updated VIF entry in instance network info cache for port a06bfb1f-0b51-4150-8e23-cdfe68e9c27f. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.632053] env[62503]: DEBUG nova.network.neutron [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating instance_info_cache with network_info: [{"id": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "address": "fa:16:3e:25:71:4d", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06bfb1f-0b", "ovs_interfaceid": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.689159] env[62503]: DEBUG nova.scheduler.client.report [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 981.788754] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a9257b-9ec6-7de1-1e73-a0702e58ed14, 'name': SearchDatastore_Task, 'duration_secs': 0.010152} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.789101] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.789409] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.789732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.789958] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.790256] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.790593] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f52d31a-8d74-4f91-acde-74f81e711a23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.798711] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.798956] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.799665] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3853ddc9-c083-4f5e-967e-8390bfc17109 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.804587] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 981.804587] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5261d6b3-0432-9bb5-1f2e-e1b114cef925" [ 981.804587] env[62503]: _type = "Task" [ 981.804587] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.811907] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5261d6b3-0432-9bb5-1f2e-e1b114cef925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.029826] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388079, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.071299] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388080, 'name': PowerOnVM_Task, 'duration_secs': 0.499689} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.071553] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.071758] env[62503]: INFO nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Took 5.79 seconds to spawn the instance on the hypervisor. 
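Editor's note: the surrounding entries show oslo.concurrency named locks serializing access to shared state ("compute_resources", the refresh_cache-<uuid> caches, and the datastore image-cache paths), logging "Acquiring lock ... by ...", "acquired ... waited Ns", and "released ... held Ns". The sketch below mirrors that pattern with a stdlib-only per-name mutex; it is an illustration under that assumption, not oslo.concurrency's implementation, and the example lock name and caller string are hypothetical.

# Simplified illustration (NOT oslo.concurrency itself) of named locks with
# the waited/held timing seen in the log entries above.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one mutex per lock name

@contextmanager
def named_lock(name, caller):
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    with _locks[name]:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

with named_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.05)   # stand-in for the critical section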
[ 982.071949] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 982.072749] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a05bcf-df51-4521-9599-2c1c800514c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.134866] env[62503]: DEBUG oslo_concurrency.lockutils [req-23105627-f3c4-4c95-b86f-818e40f1b486 req-6abacfa5-fa05-4574-969f-66add13d6ca2 service nova] Releasing lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.194865] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.197131] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.423s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.199077] env[62503]: INFO nova.compute.claims [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.213292] env[62503]: INFO nova.scheduler.client.report [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance 4d7f62b3-42d0-4f98-bac4-541f116c9709 [ 982.314635] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5261d6b3-0432-9bb5-1f2e-e1b114cef925, 'name': SearchDatastore_Task, 'duration_secs': 0.009454} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.315519] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4b327a3-c090-4888-8654-c6c8bbf0b623 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.320933] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 982.320933] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52139673-08e2-ba12-40c2-c6fa9bd1d22b" [ 982.320933] env[62503]: _type = "Task" [ 982.320933] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.328655] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52139673-08e2-ba12-40c2-c6fa9bd1d22b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.530777] env[62503]: DEBUG oslo_vmware.api [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388079, 'name': PowerOnVM_Task, 'duration_secs': 0.54014} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.531074] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.531286] env[62503]: INFO nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Took 8.54 seconds to spawn the instance on the hypervisor. [ 982.531497] env[62503]: DEBUG nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 982.532273] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98dff54-9afc-42ab-af62-19414ae287b3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.589878] env[62503]: INFO nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Took 16.57 seconds to build instance. [ 982.720389] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c651edfa-b32c-49eb-b6dd-6e17d350a552 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "4d7f62b3-42d0-4f98-bac4-541f116c9709" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.174s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.832330] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52139673-08e2-ba12-40c2-c6fa9bd1d22b, 'name': SearchDatastore_Task, 'duration_secs': 0.009833} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.832575] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.832822] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.833112] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4c45fc8-4af7-452a-b572-4e4a7501b711 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.839578] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 982.839578] env[62503]: value = "task-1388081" [ 982.839578] env[62503]: _type = "Task" [ 982.839578] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.847123] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.928271] env[62503]: DEBUG nova.compute.manager [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Received event network-changed-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 982.928401] env[62503]: DEBUG nova.compute.manager [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Refreshing instance network info cache due to event network-changed-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 982.928652] env[62503]: DEBUG oslo_concurrency.lockutils [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] Acquiring lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.928811] env[62503]: DEBUG oslo_concurrency.lockutils [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] Acquired lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.928976] env[62503]: DEBUG nova.network.neutron [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Refreshing network info cache for port b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 983.049374] env[62503]: INFO nova.compute.manager [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Took 17.07 seconds to build instance. [ 983.057015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.057322] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.057681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.057900] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.058163] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.060082] env[62503]: INFO nova.compute.manager [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Terminating instance [ 983.061906] env[62503]: DEBUG nova.compute.manager [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 983.062124] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.063009] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f189905-db0e-4771-9881-e95f26015927 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.072401] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.072685] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9315324b-da6f-45ca-b217-7a3082f13473 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.078795] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 983.078795] env[62503]: value = "task-1388082" [ 983.078795] env[62503]: _type = "Task" [ 983.078795] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.087392] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.092107] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.081s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.339154] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101e1b17-5bf5-4539-adac-5002bf24194f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.352707] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12aaa8bc-0eed-4934-b425-b0ba057b110b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.356022] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388081, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.383858] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60eccdd2-f580-4cf1-a287-13fc0d672ea6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.391398] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5fb2da-6fa3-4671-9e5e-596d8a7ae04a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.405728] env[62503]: DEBUG nova.compute.provider_tree [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.491294] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.584122] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.584122] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-dc390562-3b24-4016-bbe3-e515a1b5e6e3 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.580s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.061s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "b120b29f-0e26-465f-bc6f-4214525ae2de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.584122] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.584122] env[62503]: INFO nova.compute.manager [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Terminating instance [ 983.584122] env[62503]: INFO nova.compute.manager [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Terminating instance [ 983.584677] env[62503]: DEBUG nova.compute.manager [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 983.584677] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.584677] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f77a21-51e0-4d33-92a0-b692f30a4abb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.584677] env[62503]: DEBUG nova.compute.manager [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 983.584677] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.584677] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce625c6-24bf-4ed5-8b21-bcd708a9c4aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.584677] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.584677] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.584677] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dd7a04e-ca1c-4d01-b1b3-8337d1c2f26c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.584677] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44864346-4cd9-4ac1-9e5f-acf1c39b931a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.590656] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388082, 'name': PowerOffVM_Task, 'duration_secs': 0.425541} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.594647] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.594834] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.595140] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 983.595140] env[62503]: value = "task-1388084" [ 983.595140] env[62503]: _type = "Task" [ 983.595140] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.595367] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 983.595367] env[62503]: value = "task-1388083" [ 983.595367] env[62503]: _type = "Task" [ 983.595367] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.595542] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-564219c6-74ab-4049-8eaa-7cbe0001bdfd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.607912] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.610890] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.661046] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.661303] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.661638] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore1] 7eb8e049-dd65-43bd-829a-8f773f7ad156 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.662017] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-778e8f75-b494-46f6-ad0d-ede56c7dbf4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.664606] env[62503]: DEBUG nova.network.neutron [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updated VIF entry in instance network info cache for port b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.664986] env[62503]: DEBUG nova.network.neutron [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updating instance_info_cache with network_info: [{"id": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "address": "fa:16:3e:ea:52:fa", "network": {"id": "dd3019b1-d7ac-4167-9afc-c4accedb7cf9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1017738732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93cccd8859f649f4a3444c42a8c188f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c7fd20-28", "ovs_interfaceid": "b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.670721] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 983.670721] env[62503]: value = "task-1388086" [ 983.670721] env[62503]: _type = "Task" [ 983.670721] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.678863] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388086, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.852025] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527502} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.852572] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.852951] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.853789] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-327e3634-a531-4a32-bfcd-9379c0ecfd46 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.859516] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 983.859516] env[62503]: value = "task-1388087" [ 983.859516] env[62503]: _type = "Task" [ 983.859516] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.867682] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388087, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.908855] env[62503]: DEBUG nova.scheduler.client.report [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 984.045391] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.045566] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 984.109742] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388083, 'name': PowerOffVM_Task, 'duration_secs': 0.208961} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.112659] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.112845] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.113138] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388084, 'name': PowerOffVM_Task, 'duration_secs': 0.208581} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.114038] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae13cde8-391e-4d8c-9bfa-6b5a559deb36 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.115481] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.115627] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.116132] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a586f6f-aa9c-454c-86fa-ef72ba1be2c3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.169247] env[62503]: DEBUG oslo_concurrency.lockutils [req-6660d42b-2831-4fee-85a9-bf64e07f72a3 req-4fc481c7-67e2-4f5e-903c-de0df004798a service nova] Releasing lock "refresh_cache-bba6c92b-cac3-4677-a8f4-57a2704fc685" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.179916] env[62503]: DEBUG oslo_vmware.api [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388086, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13646} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.180154] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.180342] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.180573] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.180724] env[62503]: INFO nova.compute.manager [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 984.180984] env[62503]: DEBUG oslo.service.loopingcall [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.181525] env[62503]: DEBUG nova.compute.manager [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 984.181639] env[62503]: DEBUG nova.network.neutron [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.202413] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.202696] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.202848] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleting the datastore file [datastore2] a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.203564] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72283b5b-d479-42dd-b9bc-805c2bbcff96 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.211963] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 984.211963] env[62503]: value = "task-1388090" [ 984.211963] env[62503]: _type = "Task" [ 984.211963] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.213299] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.213495] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.213672] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleting the datastore file [datastore1] b120b29f-0e26-465f-bc6f-4214525ae2de {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.216933] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cd6623e-88c6-4d04-aafb-8e076e8993a1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.223659] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388090, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.224869] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 984.224869] env[62503]: value = "task-1388091" [ 984.224869] env[62503]: _type = "Task" [ 984.224869] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.232455] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.368991] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061207} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.369224] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.370013] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183aee9b-2b52-4b38-b996-3f6840955036 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.393172] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.393445] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecbe2309-2f4d-4b26-a122-ab0ee391c412 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.413239] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.413693] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 984.416205] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 984.416205] env[62503]: value = "task-1388092" [ 984.416205] env[62503]: _type = "Task" [ 984.416205] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.425067] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388092, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.473576] env[62503]: DEBUG nova.compute.manager [req-a11f85b2-9534-4961-b3ab-88993cfffe2b req-47986630-f523-4cbf-8ea5-2fd720cf464a service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Received event network-vif-deleted-077e8de3-2f79-490d-8629-d1d9ffd38862 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 984.473576] env[62503]: INFO nova.compute.manager [req-a11f85b2-9534-4961-b3ab-88993cfffe2b req-47986630-f523-4cbf-8ea5-2fd720cf464a service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Neutron deleted interface 077e8de3-2f79-490d-8629-d1d9ffd38862; detaching it from the instance and deleting it from the info cache [ 984.474210] env[62503]: DEBUG nova.network.neutron [req-a11f85b2-9534-4961-b3ab-88993cfffe2b req-47986630-f523-4cbf-8ea5-2fd720cf464a service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.583887] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.584116] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquired lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.584357] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Forcefully refreshing network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 984.722972] env[62503]: DEBUG oslo_vmware.api [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14302} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.723573] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.723883] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.724114] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.724348] env[62503]: INFO nova.compute.manager [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 984.724640] env[62503]: DEBUG oslo.service.loopingcall [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.724880] env[62503]: DEBUG nova.compute.manager [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 984.725009] env[62503]: DEBUG nova.network.neutron [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.735067] env[62503]: DEBUG oslo_vmware.api [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138896} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.735067] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.735253] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.735307] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.735468] env[62503]: INFO nova.compute.manager [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Took 1.17 seconds to destroy the instance on the hypervisor. [ 984.735691] env[62503]: DEBUG oslo.service.loopingcall [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.735875] env[62503]: DEBUG nova.compute.manager [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 984.735965] env[62503]: DEBUG nova.network.neutron [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.919659] env[62503]: DEBUG nova.compute.utils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.921521] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 984.921690] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.937731] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.952317] env[62503]: DEBUG nova.network.neutron [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.964300] env[62503]: DEBUG nova.compute.manager [req-09805000-8e35-41a7-b198-00257320d10f req-66e3fe30-7e97-46c6-9bc7-fcf4c20a1a17 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Received event network-vif-deleted-dac5db33-8213-452f-8974-e4fcfb5e3013 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 984.964644] env[62503]: INFO nova.compute.manager [req-09805000-8e35-41a7-b198-00257320d10f req-66e3fe30-7e97-46c6-9bc7-fcf4c20a1a17 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Neutron deleted interface dac5db33-8213-452f-8974-e4fcfb5e3013; detaching it from the instance and deleting it from the info cache [ 984.965117] env[62503]: DEBUG nova.network.neutron [req-09805000-8e35-41a7-b198-00257320d10f req-66e3fe30-7e97-46c6-9bc7-fcf4c20a1a17 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.967303] env[62503]: DEBUG nova.policy [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0e90b6b4c414583af760c03e336e4d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f823912f7b1a4998a6dbc22060cf6c5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 984.976685] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45642d8d-8fcd-4b50-b9ab-1e3cb5f29df2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.986736] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d383ec2f-0aba-4fff-a9ab-1c7acf4dd52a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.012244] env[62503]: DEBUG nova.compute.manager [req-a11f85b2-9534-4961-b3ab-88993cfffe2b req-47986630-f523-4cbf-8ea5-2fd720cf464a service nova] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] 
Detach interface failed, port_id=077e8de3-2f79-490d-8629-d1d9ffd38862, reason: Instance 7eb8e049-dd65-43bd-829a-8f773f7ad156 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 985.195483] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Successfully created port: 9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.437636] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 985.440686] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388092, 'name': ReconfigVM_Task, 'duration_secs': 0.851828} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.441156] env[62503]: DEBUG nova.network.neutron [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.442069] env[62503]: DEBUG nova.network.neutron [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.442990] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to attach disk [datastore1] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.444254] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58fc488f-586e-4b76-b357-91ba5b04643d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.450664] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 985.450664] env[62503]: value = "task-1388093" [ 985.450664] env[62503]: _type = "Task" [ 985.450664] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.454285] env[62503]: INFO nova.compute.manager [-] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Took 1.27 seconds to deallocate network for instance. 
[ 985.461356] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388093, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.469984] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4130e5c-3de1-492f-952e-cba2ae2d2ba6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.478616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703c6f7c-e923-44fb-9b2f-0e4fdd790825 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.506177] env[62503]: DEBUG nova.compute.manager [req-09805000-8e35-41a7-b198-00257320d10f req-66e3fe30-7e97-46c6-9bc7-fcf4c20a1a17 service nova] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Detach interface failed, port_id=dac5db33-8213-452f-8974-e4fcfb5e3013, reason: Instance b120b29f-0e26-465f-bc6f-4214525ae2de could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 985.813836] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [{"id": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "address": "fa:16:3e:24:a7:1c", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71aa781d-4a", "ovs_interfaceid": "71aa781d-4a40-4f00-8fb8-06cb4c73986a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.944848] env[62503]: INFO nova.compute.manager [-] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Took 1.22 seconds to deallocate network for instance. [ 985.946520] env[62503]: INFO nova.compute.manager [-] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Took 1.21 seconds to deallocate network for instance. [ 985.963461] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388093, 'name': Rename_Task, 'duration_secs': 0.312774} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.963746] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.964421] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8ec8a05-025c-4189-b66d-de40a2a12cb2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.968141] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.968141] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.968141] env[62503]: DEBUG nova.objects.instance [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid 7eb8e049-dd65-43bd-829a-8f773f7ad156 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.973808] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 985.973808] env[62503]: value = "task-1388094" [ 985.973808] env[62503]: _type = "Task" [ 985.973808] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.983186] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388094, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.316760] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Releasing lock "refresh_cache-e693bcc2-3883-466d-913c-831146ca81e7" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.317127] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updated the network info_cache for instance {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10295}} [ 986.317218] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.317380] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.317527] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.317689] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.317833] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.317977] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.318119] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 986.318265] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.450185] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 986.454077] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.456919] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.474893] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.475159] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.475320] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.475506] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.475658] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.475808] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} 
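The nova.virt.hardware entries above trace the CPU-topology selection for the m1.nano flavor: flavor/image limits and preferences of 0 are treated as unset, the effective limits fall back to 65536 per dimension, and candidate topologies are then enumerated for the flavor's single vCPU. A minimal sketch of that enumeration, for illustration only (it ignores NUMA handling, preference ordering and the real VirtCPUTopology objects in nova/virt/hardware.py):

    # Illustrative only: simplified re-creation of the topology search the
    # surrounding log entries describe; not Nova's actual implementation.
    from itertools import product

    MAX = 65536  # the "unset" limit shown in the log (sockets=cores=threads=65536)

    def possible_topologies(vcpus, max_sockets=MAX, max_cores=MAX, max_threads=MAX):
        """Yield (sockets, cores, threads) triples that exactly hold `vcpus` CPUs."""
        for sockets, cores, threads in product(range(1, min(max_sockets, vcpus) + 1),
                                               range(1, min(max_cores, vcpus) + 1),
                                               range(1, min(max_threads, vcpus) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    # For 1 vCPU with no flavor/image limits set this yields a single candidate,
    # matching the "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
    # entry that follows.
    print(list(possible_topologies(1)))   # [(1, 1, 1)]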
[ 986.476018] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.476235] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.476356] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.476521] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.476708] env[62503]: DEBUG nova.virt.hardware [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.477626] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e3d7ff-7d39-47a1-90e2-5e99815e3875 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.491637] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bede05e-39b0-40d6-8409-505c12c2ea9d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.495390] env[62503]: DEBUG oslo_vmware.api [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388094, 'name': PowerOnVM_Task, 'duration_secs': 0.421029} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.497985] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.498192] env[62503]: INFO nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Took 8.00 seconds to spawn the instance on the hypervisor. 
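The Rename_Task and PowerOnVM_Task entries above are driven through oslo.vmware's wait_for_task/_poll_task loop: the SOAP call returns a task reference immediately, and the session then polls the task state, logging "progress is N%" until it reports success (with duration_secs) or raises on error. A rough, self-contained sketch of that pattern follows; it is not the oslo.vmware implementation, and the task-info shape and poll interval are simplified assumptions:

    # Illustrative only: a generic poll loop mirroring the "progress is N%" /
    # "completed successfully" entries above. `get_task_info` stands in for the
    # vSphere TaskInfo lookup and is a hypothetical callable, not an oslo.vmware API.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vSphere-style task until it succeeds, fails or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()            # dict with 'state', 'progress', 'error'
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # queued/running: report progress and try again, as _poll_task does
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete in time')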
[ 986.498392] env[62503]: DEBUG nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 986.500638] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4c9e7d-a1b9-4884-bc83-66f059611b78 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.504209] env[62503]: DEBUG nova.compute.manager [req-beef73ba-3cbe-43d5-a9a8-d45ad373b532 req-b184d5e9-57e2-4579-ac83-df0e4c462fd8 service nova] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Received event network-vif-deleted-6d46d504-d2d1-40ff-ac6f-6355cc56d1d4 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 986.605669] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c35afd-8271-4f4c-a398-8e3685ad0dd6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.613015] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f662382e-65ab-41fd-add7-a2882f2baad9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.643796] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa481e8-bc67-4a28-907a-e4d3c07143ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.651913] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e353281-859f-4bda-b127-f648bc49e403 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.665229] env[62503]: DEBUG nova.compute.provider_tree [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.689033] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Successfully updated port: 9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.820873] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.030050] env[62503]: INFO nova.compute.manager [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Took 19.74 seconds to build instance. 
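The recurring "Acquiring lock ... / acquired ... waited N s / released ... held N s" entries come from oslo.concurrency's synchronized wrapper, which serializes callers on a named in-process lock and records how long each caller waited for and held it. A minimal usage sketch, assuming oslo.concurrency is installed; the function below is a stand-in, not Nova's ResourceTracker code:

    # Illustrative only: the compute_resources serialization pattern visible in
    # the log. update_usage() is a hypothetical example, not a Nova method.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, delta):
        # Only one caller at a time reaches this point; the decorator's inner()
        # logs the "acquired ... waited" / "released ... held" DEBUG messages
        # seen above (when DEBUG logging is enabled).
        print(f'updating usage for {instance_uuid}: {delta}')

    update_usage('7eb8e049-dd65-43bd-829a-8f773f7ad156', {'vcpus': 1})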
[ 987.168165] env[62503]: DEBUG nova.scheduler.client.report [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 987.191383] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.191588] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.191755] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.532333] env[62503]: DEBUG oslo_concurrency.lockutils [None req-11342709-c4fc-4d03-8c6f-f5c2d7bba682 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.253s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.672945] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.675302] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.221s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.675544] env[62503]: DEBUG nova.objects.instance [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lazy-loading 'resources' on Instance uuid a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.691022] 
env[62503]: INFO nova.scheduler.client.report [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance 7eb8e049-dd65-43bd-829a-8f773f7ad156 [ 987.722965] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.864706] env[62503]: DEBUG nova.network.neutron [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.196660] env[62503]: DEBUG nova.compute.manager [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Received event network-changed-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 988.196855] env[62503]: DEBUG nova.compute.manager [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Refreshing instance network info cache due to event network-changed-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 988.197046] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] Acquiring lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.197196] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] Acquired lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.197366] env[62503]: DEBUG nova.network.neutron [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Refreshing network info cache for port a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.203435] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a37c3c15-a285-46bb-b3ad-140802bcfde5 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "7eb8e049-dd65-43bd-829a-8f773f7ad156" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.146s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.295407] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cfadeb-ccf6-40d8-9221-00618f7d1ad9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.303334] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c062aa-28fc-4407-a7a2-3a95a457faeb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.333094] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845c71e8-73cc-44cd-b413-3df3c5273a27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.340340] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb80878c-e8cc-4658-a9fe-8757ccf52e52 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.355498] env[62503]: DEBUG nova.compute.provider_tree [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.371012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.371012] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 
tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Instance network_info: |[{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 988.371645] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:af:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d56fc29-052f-4ca8-908c-17b026450550', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.378979] env[62503]: DEBUG oslo.service.loopingcall [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.379204] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.379667] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0837737-15cd-4ddd-a6ad-d6a8ea6a439e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.399159] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.399159] env[62503]: value = "task-1388095" [ 988.399159] env[62503]: _type = "Task" [ 988.399159] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.406642] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388095, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.420230] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.420485] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.420712] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.420914] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.421114] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.423311] env[62503]: INFO nova.compute.manager [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Terminating instance [ 988.425040] env[62503]: DEBUG nova.compute.manager [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 988.425244] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.426029] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a908443d-8c3d-4401-bfd5-7e02f1be05ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.434079] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.434212] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51fd513f-48e8-44ab-90ef-273f6e286d70 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.440629] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 988.440629] env[62503]: value = "task-1388096" [ 988.440629] env[62503]: _type = "Task" [ 988.440629] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.447822] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388096, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.529503] env[62503]: DEBUG nova.compute.manager [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Received event network-vif-plugged-9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 988.529775] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Acquiring lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.529993] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.530182] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.530352] env[62503]: DEBUG nova.compute.manager [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] No waiting events found dispatching network-vif-plugged-9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 988.530517] env[62503]: WARNING nova.compute.manager [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Received unexpected event network-vif-plugged-9d56fc29-052f-4ca8-908c-17b026450550 for instance with vm_state building and task_state spawning. [ 988.530683] env[62503]: DEBUG nova.compute.manager [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Received event network-changed-9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 988.530840] env[62503]: DEBUG nova.compute.manager [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Refreshing instance network info cache due to event network-changed-9d56fc29-052f-4ca8-908c-17b026450550. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 988.531030] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Acquiring lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.531168] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Acquired lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.531327] env[62503]: DEBUG nova.network.neutron [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Refreshing network info cache for port 9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.858413] env[62503]: DEBUG nova.scheduler.client.report [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 988.909902] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388095, 'name': CreateVM_Task, 'duration_secs': 0.372138} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.912174] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.912864] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.913052] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.913370] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.913946] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3e6e133-679e-45cc-b1d6-89105996229a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.918675] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 988.918675] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52915adb-6f8d-9979-46a2-a4f8b95d9eac" [ 988.918675] env[62503]: _type = "Task" [ 988.918675] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.926714] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52915adb-6f8d-9979-46a2-a4f8b95d9eac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.950210] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388096, 'name': PowerOffVM_Task, 'duration_secs': 0.220276} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.950496] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.950763] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.951063] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e60682af-16c8-4eec-ad6e-61aa4bd4ffb9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.957714] env[62503]: DEBUG nova.network.neutron [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updated VIF entry in instance network info cache for port a06bfb1f-0b51-4150-8e23-cdfe68e9c27f. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.958155] env[62503]: DEBUG nova.network.neutron [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating instance_info_cache with network_info: [{"id": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "address": "fa:16:3e:25:71:4d", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa06bfb1f-0b", "ovs_interfaceid": "a06bfb1f-0b51-4150-8e23-cdfe68e9c27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.959680] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.959906] env[62503]: DEBUG 
oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.017291] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.017520] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.017711] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleting the datastore file [datastore2] ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.017987] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d7cf067-d0e9-4b07-b397-bba0b83b6c45 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.024963] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 989.024963] env[62503]: value = "task-1388098" [ 989.024963] env[62503]: _type = "Task" [ 989.024963] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.032695] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.230565] env[62503]: DEBUG nova.network.neutron [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updated VIF entry in instance network info cache for port 9d56fc29-052f-4ca8-908c-17b026450550. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.230939] env[62503]: DEBUG nova.network.neutron [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.365915] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.368563] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.911s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.368563] env[62503]: DEBUG nova.objects.instance [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lazy-loading 'resources' on Instance uuid b120b29f-0e26-465f-bc6f-4214525ae2de {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.393346] env[62503]: INFO nova.scheduler.client.report [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted allocations for instance a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d [ 989.429070] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52915adb-6f8d-9979-46a2-a4f8b95d9eac, 'name': SearchDatastore_Task, 'duration_secs': 0.011237} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.429368] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.429698] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.429959] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.430121] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.430465] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.430568] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf0b8590-7ebf-4587-9922-319b1d6cf38a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.438351] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.438524] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.439274] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e13fd0f-c079-4a91-9c19-2f90fe07a955 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.444276] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 989.444276] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e65cb9-e601-8bbb-8367-ee2ab9a1b782" [ 989.444276] env[62503]: _type = "Task" [ 989.444276] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.451235] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e65cb9-e601-8bbb-8367-ee2ab9a1b782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.462152] env[62503]: DEBUG oslo_concurrency.lockutils [req-2c449089-ffba-415c-a529-8ac1ff65ff9d req-041ef057-97d9-4578-85a8-c36107dc8cb2 service nova] Releasing lock "refresh_cache-ccc542a3-ff01-42ca-965e-706bed4c6e07" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.462644] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 989.535545] env[62503]: DEBUG oslo_vmware.api [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149393} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.535817] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.536051] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.536243] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.536421] env[62503]: INFO nova.compute.manager [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 989.536667] env[62503]: DEBUG oslo.service.loopingcall [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.537161] env[62503]: DEBUG nova.compute.manager [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 989.537161] env[62503]: DEBUG nova.network.neutron [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.733340] env[62503]: DEBUG oslo_concurrency.lockutils [req-60a57e5a-00c5-4e1c-a9ca-9b95f80f72c2 req-748c8c0c-e4fe-4db4-88c9-acec64dbd7ab service nova] Releasing lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.903828] env[62503]: DEBUG oslo_concurrency.lockutils [None req-58dc9104-7ebc-4206-b174-f8baeb5e8efe tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.352s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.954508] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e65cb9-e601-8bbb-8367-ee2ab9a1b782, 'name': SearchDatastore_Task, 'duration_secs': 0.00716} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.958885] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4b950ac-efa1-487b-acc4-b1c31057059e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.969960] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 989.969960] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a0ffb9-b8de-9abb-db73-c26c510e1c4f" [ 989.969960] env[62503]: _type = "Task" [ 989.969960] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.986992] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a0ffb9-b8de-9abb-db73-c26c510e1c4f, 'name': SearchDatastore_Task, 'duration_secs': 0.009514} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.987935] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.988234] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.988587] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.989143] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84ab50c8-ba00-4e6f-91c7-b381f74adf7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.997812] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 989.997812] env[62503]: value = "task-1388099" [ 989.997812] env[62503]: _type = "Task" [ 989.997812] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.006203] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388099, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.007640] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a335be29-a7c1-449f-95a3-21c7cb061de8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.013719] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca531a7-1413-4aa7-a046-d9b92cc3a901 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.050682] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a824a59-bbda-4bad-9ebd-e208917a716c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.058811] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee3c837-65f0-4e7f-9421-e4af30a89d2f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.075343] env[62503]: DEBUG nova.compute.provider_tree [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.507990] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388099, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.552968] env[62503]: DEBUG nova.compute.manager [req-04b77012-9ad2-4518-afe5-d16646dc1654 req-ccc946fa-afc7-4899-9857-2a37cf6b0e3f service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Received event network-vif-deleted-228b554a-2e8c-4413-833d-65514ca1285a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 990.553189] env[62503]: INFO nova.compute.manager [req-04b77012-9ad2-4518-afe5-d16646dc1654 req-ccc946fa-afc7-4899-9857-2a37cf6b0e3f service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Neutron deleted interface 228b554a-2e8c-4413-833d-65514ca1285a; detaching it from the instance and deleting it from the info cache [ 990.553366] env[62503]: DEBUG nova.network.neutron [req-04b77012-9ad2-4518-afe5-d16646dc1654 req-ccc946fa-afc7-4899-9857-2a37cf6b0e3f service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.578452] env[62503]: DEBUG nova.scheduler.client.report [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 990.709801] env[62503]: DEBUG nova.network.neutron [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.009146] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388099, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511238} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.009472] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.009662] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.009934] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a17123ea-db0c-4be2-a254-8757c7af55a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.016049] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 991.016049] env[62503]: value = "task-1388100" [ 991.016049] env[62503]: _type = "Task" [ 991.016049] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.023249] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388100, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.056181] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49712b64-abdb-4944-aeb7-2115e8a7af9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.065846] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04003d2-d1a1-4e51-8ebd-8a2284a11ac1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.082831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.091888] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.271s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.092076] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.092232] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 991.092637] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.105s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.093960] env[62503]: INFO nova.compute.claims [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.096292] env[62503]: DEBUG nova.compute.manager [req-04b77012-9ad2-4518-afe5-d16646dc1654 req-ccc946fa-afc7-4899-9857-2a37cf6b0e3f service nova] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Detach interface failed, port_id=228b554a-2e8c-4413-833d-65514ca1285a, reason: Instance ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 991.097286] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044e7749-a975-4db8-8a85-8307e864ffb9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.104227] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13933e9f-eb33-4f51-aaf8-c2220520cf8c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.111024] env[62503]: INFO nova.scheduler.client.report [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted allocations for instance b120b29f-0e26-465f-bc6f-4214525ae2de [ 991.121430] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b0a7b7-79b5-4b75-962c-27ea359b368e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.128360] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871c8f04-04ae-4108-a4a4-cc25d0fca195 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.157795] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180958MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 991.157955] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.212166] env[62503]: INFO nova.compute.manager [-] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Took 1.68 seconds to deallocate network for instance. [ 991.525818] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063196} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.526106] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.526852] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943b3c7d-a0b1-4238-9c68-a67210391900 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.547691] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.547920] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29baff6b-6a8b-4293-af2d-c93b54568186 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.567182] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 991.567182] env[62503]: value = "task-1388101" [ 991.567182] env[62503]: _type = "Task" [ 991.567182] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.574466] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.627744] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0d96d3a2-8cbc-4623-882b-2a659064bac0 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "b120b29f-0e26-465f-bc6f-4214525ae2de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.071s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.718377] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.077766] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388101, 'name': ReconfigVM_Task, 'duration_secs': 0.264817} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.078092] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfigured VM instance instance-00000060 to attach disk [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.078723] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0c0a060-79a9-43d5-9c4c-03f9193c36c2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.085703] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 992.085703] env[62503]: value = "task-1388102" [ 992.085703] env[62503]: _type = "Task" [ 992.085703] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.093742] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388102, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.186563] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcac70e-9aea-4f53-95b1-013a80a21f36 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.193938] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a4b7f4-2b9e-416f-af91-f057614444cb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.224095] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f73d18a-4c51-4eec-8dae-13f1d29ff74f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.231122] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa003df-2a7a-42ed-9172-4c0d10042de0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.243897] env[62503]: DEBUG nova.compute.provider_tree [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.595865] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388102, 'name': Rename_Task, 'duration_secs': 0.131904} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.596169] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.596426] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d3fb4ed-0214-48b4-a5df-d47c1b1d0259 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.602963] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 992.602963] env[62503]: value = "task-1388103" [ 992.602963] env[62503]: _type = "Task" [ 992.602963] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.610225] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.649505] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.649673] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.678259] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.678512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.747123] env[62503]: DEBUG nova.scheduler.client.report [None req-d56402ad-0390-4eaa-b039-d48df2ed199a 
tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 993.113146] env[62503]: DEBUG oslo_vmware.api [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388103, 'name': PowerOnVM_Task, 'duration_secs': 0.424792} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.113444] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.113655] env[62503]: INFO nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Took 6.66 seconds to spawn the instance on the hypervisor. [ 993.113845] env[62503]: DEBUG nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 993.114641] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce4b2bc-2f2e-4d99-bf4e-a51addaf8987 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.152209] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 993.180474] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 993.252145] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.252703] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 993.255041] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.097s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.630419] env[62503]: INFO nova.compute.manager [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Took 12.88 seconds to build instance. [ 993.675981] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.698267] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.758601] env[62503]: DEBUG nova.compute.utils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 993.763677] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 993.763854] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 993.813272] env[62503]: DEBUG nova.policy [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 994.065923] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Successfully created port: a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 994.132681] env[62503]: DEBUG oslo_concurrency.lockutils [None req-559d326e-c9ab-4657-aa3f-f3c3544de7c8 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.402s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.264907] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 994.273966] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=62503) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 994.293195] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.293195] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance e693bcc2-3883-466d-913c-831146ca81e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.293195] env[62503]: WARNING nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 994.293195] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance bba6c92b-cac3-4677-a8f4-57a2704fc685 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.293469] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ccc542a3-ff01-42ca-965e-706bed4c6e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.293469] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 81545f5b-a9ba-4d58-aaca-62a2652a8102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.796929] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c6961bd3-16fa-4476-9d9c-8e91f7c0bee3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.836783] env[62503]: DEBUG nova.compute.manager [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Stashing vm_state: active {{(pid=62503) _prep_resize /opt/stack/nova/nova/compute/manager.py:5920}} [ 995.284164] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 995.300095] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 9a792b9f-51c3-4cef-a3b8-1e81866433ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 995.300276] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Migration b34ab829-d7d5-456a-a7c1-b0389514e668 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 995.300381] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance c1a41261-03d3-4dde-9b90-68bdec1a548b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 995.300588] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 995.300728] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 995.311935] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 995.312221] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 995.312483] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.312558] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 995.312839] env[62503]: DEBUG 
nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.313019] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 995.313431] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 995.313431] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 995.313607] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 995.313812] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 995.314057] env[62503]: DEBUG nova.virt.hardware [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 995.314948] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268fe643-eef0-4162-8065-93681fa6b974 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.324784] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f876f7f9-f1d5-49fa-b1b7-a6ba913fc350 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.353984] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.434119] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edef7f69-721d-47d6-b91c-677f49a70fb9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.441164] 
env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c136f95-c263-4f52-8d45-94ba2132d9df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.472260] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd60c14d-54e0-481e-ae9f-da29d322e4ef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.475696] env[62503]: DEBUG nova.compute.manager [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Received event network-vif-plugged-a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 995.475907] env[62503]: DEBUG oslo_concurrency.lockutils [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] Acquiring lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.476134] env[62503]: DEBUG oslo_concurrency.lockutils [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.476306] env[62503]: DEBUG oslo_concurrency.lockutils [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.476478] env[62503]: DEBUG nova.compute.manager [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] No waiting events found dispatching network-vif-plugged-a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 995.476635] env[62503]: WARNING nova.compute.manager [req-680569db-60d5-4fba-bb3e-dd7473cb3246 req-654018ad-6ae2-4ab6-ad81-d9a85de4481d service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Received unexpected event network-vif-plugged-a4b23c78-e24e-457c-b40e-f72cc7324c9c for instance with vm_state building and task_state spawning. 
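[editor's note] The lock records threaded through this trace ('Lock "..." acquired by "..." :: waited 0.000s', 'Lock "..." "released" by "..." :: held 0.000s') all come from oslo.concurrency's named-lock wrapper, which serialises work such as the resource tracker's "compute_resources" claims and the per-instance "-events" handling seen just above. As a rough, hedged illustration only, here is a simplified stand-in built on the standard library; it is not the oslo.concurrency implementation, and the helper name log_lock is made up for the sketch:

    import contextlib
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _LOCKS: dict[str, threading.Lock] = {}  # one named lock per resource, e.g. "compute_resources"

    @contextlib.contextmanager
    def log_lock(name: str, caller: str):
        """Acquire the named lock and log wait/hold times, mimicking the
        'acquired by ... :: waited Ns' / '"released" by ... :: held Ns' records above."""
        lock = _LOCKS.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, time.monotonic() - t0)
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - held_from)

    # Usage in the spirit of the records above (names illustrative only):
    # with log_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...perform the claim while holding the lock...
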
[ 995.484555] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1721f5-6fab-4e4e-8161-03f475aa014d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.498841] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.555288] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Successfully updated port: a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.001935] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 996.057391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.057509] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.057673] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.506694] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 996.507096] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.252s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.507215] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 
tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.789s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.507407] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.509505] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.834s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.510949] env[62503]: INFO nova.compute.claims [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.534431] env[62503]: INFO nova.scheduler.client.report [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted allocations for instance ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7 [ 996.588043] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 996.707187] env[62503]: DEBUG nova.network.neutron [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Updating instance_info_cache with network_info: [{"id": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "address": "fa:16:3e:41:fe:bb", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b23c78-e2", "ovs_interfaceid": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.043188] env[62503]: DEBUG oslo_concurrency.lockutils [None req-52f08c30-735d-4a21-9a98-99d91426f69e tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.622s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.210914] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.211258] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Instance network_info: |[{"id": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "address": "fa:16:3e:41:fe:bb", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", 
"external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b23c78-e2", "ovs_interfaceid": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 997.211753] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:fe:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4b23c78-e24e-457c-b40e-f72cc7324c9c', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.219453] env[62503]: DEBUG oslo.service.loopingcall [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.219709] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.221770] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5eea17f4-9765-46b8-884b-4aad42cf64bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.240681] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.240681] env[62503]: value = "task-1388104" [ 997.240681] env[62503]: _type = "Task" [ 997.240681] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.248712] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388104, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.486290] env[62503]: DEBUG nova.compute.manager [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Received event network-changed-a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 997.486442] env[62503]: DEBUG nova.compute.manager [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Refreshing instance network info cache due to event network-changed-a4b23c78-e24e-457c-b40e-f72cc7324c9c. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 997.486664] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] Acquiring lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.486917] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] Acquired lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.487136] env[62503]: DEBUG nova.network.neutron [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Refreshing network info cache for port a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.625181] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9117af99-dee5-44a5-976f-d1b979296b8e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.633794] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0353112-84bd-473d-85d1-f2cda23166be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.663528] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549e7007-671e-4b66-b09c-1811585c6e3f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.670923] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971449d5-a258-42ef-ac29-d1b695e54079 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.685036] env[62503]: DEBUG nova.compute.provider_tree [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.751162] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388104, 'name': CreateVM_Task, 'duration_secs': 0.301447} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.751376] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.752048] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.752229] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.752554] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.752831] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7413929-3a88-4595-9628-214590b16174 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.758087] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 997.758087] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fa0c4f-d64c-9bf6-7720-ccfe2b422d43" [ 997.758087] env[62503]: _type = "Task" [ 997.758087] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.765928] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fa0c4f-d64c-9bf6-7720-ccfe2b422d43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.772387] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.772637] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.188717] env[62503]: DEBUG nova.scheduler.client.report [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 998.242404] env[62503]: DEBUG nova.network.neutron [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Updated VIF entry in instance network info cache for port a4b23c78-e24e-457c-b40e-f72cc7324c9c. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 998.242772] env[62503]: DEBUG nova.network.neutron [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Updating instance_info_cache with network_info: [{"id": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "address": "fa:16:3e:41:fe:bb", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b23c78-e2", "ovs_interfaceid": "a4b23c78-e24e-457c-b40e-f72cc7324c9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.268104] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52fa0c4f-d64c-9bf6-7720-ccfe2b422d43, 'name': SearchDatastore_Task, 'duration_secs': 0.012157} completed successfully. 
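The instance_info_cache updates above dump the full Neutron network_info list for port a4b23c78-e24e-457c-b40e-f72cc7324c9c. As a minimal illustrative sketch (not Nova code), the structure can be walked like this to pull out the fields the VMware driver ends up using; the literal below is an abbreviated copy of the cache dump above, and the helper name summarize_vif is hypothetical.

# Illustrative only: walk one VIF entry from the network_info dump above.
network_info = [{
    "id": "a4b23c78-e24e-457c-b40e-f72cc7324c9c",
    "address": "fa:16:3e:41:fe:bb",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tapa4b23c78-e2",
    "ovs_interfaceid": "a4b23c78-e24e-457c-b40e-f72cc7324c9c",
    "active": True,
}]

def summarize_vif(vif):
    """Return (port id, MAC, fixed IPs, devname) for one VIF entry."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return vif["id"], vif["address"], fixed_ips, vif["devname"]

for vif in network_info:
    print(summarize_vif(vif))
# ('a4b23c78-e24e-457c-b40e-f72cc7324c9c', 'fa:16:3e:41:fe:bb', ['192.168.128.3'], 'tapa4b23c78-e2')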
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.268402] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.268638] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.268878] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.269039] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.269223] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.269479] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69584239-5c94-4ad7-a673-7ba245c1244c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.276346] env[62503]: INFO nova.compute.manager [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Detaching volume 628fd04b-dba0-424f-9dcd-1fc8f4074db1 [ 998.279045] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.279213] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.280169] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5eb17b9-40ac-40bb-b9cb-526532ed88f1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.288347] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 998.288347] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e9ac7-4211-ee04-ae80-5c35bdc924db" [ 998.288347] env[62503]: _type = "Task" [ 998.288347] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.295631] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e9ac7-4211-ee04-ae80-5c35bdc924db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.309082] env[62503]: INFO nova.virt.block_device [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Attempting to driver detach volume 628fd04b-dba0-424f-9dcd-1fc8f4074db1 from mountpoint /dev/sdb [ 998.309317] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Volume detach. 
Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 998.309506] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294612', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'name': 'volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e693bcc2-3883-466d-913c-831146ca81e7', 'attached_at': '', 'detached_at': '', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'serial': '628fd04b-dba0-424f-9dcd-1fc8f4074db1'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 998.310314] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb0f0ef-b12a-4694-a99f-35ccefbdd2bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.331329] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c107230d-b56d-409d-b328-b353ac2d4d05 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.338024] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8279047-123b-4561-9b01-ec6f16d8fd38 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.357308] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cb1a7e-f7d9-4a8b-8a72-daac44e47173 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.370914] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] The volume has not been displaced from its original location: [datastore2] volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1/volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 998.375994] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 998.376266] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9462e8f9-9d6c-4660-8622-783561999b2c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.393742] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 998.393742] env[62503]: value = "task-1388105" [ 998.393742] env[62503]: _type = "Task" [ 998.393742] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.401793] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388105, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.696596] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.697091] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 998.700508] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.002s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.701755] env[62503]: INFO nova.compute.claims [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.745454] env[62503]: DEBUG oslo_concurrency.lockutils [req-bc9abdba-2d7c-4022-8e76-2fb4fb80ff90 req-f9853fd4-b0d5-4606-8ed8-74fdfdb391d1 service nova] Releasing lock "refresh_cache-81545f5b-a9ba-4d58-aaca-62a2652a8102" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.797995] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e9ac7-4211-ee04-ae80-5c35bdc924db, 'name': SearchDatastore_Task, 'duration_secs': 0.008135} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.798763] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-204de738-8f63-49c1-9993-e4af136184fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.804058] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 998.804058] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526f4075-7058-268c-0570-6bd747eee4bf" [ 998.804058] env[62503]: _type = "Task" [ 998.804058] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.811361] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526f4075-7058-268c-0570-6bd747eee4bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.902913] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388105, 'name': ReconfigVM_Task, 'duration_secs': 0.2156} completed successfully. 
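The recurring Lock "..." acquired by / "released" by lines (for "compute_resources", the "refresh_cache-<uuid>" locks, and the per-instance locks) are emitted by oslo.concurrency's lockutils. The sketch below shows the usual usage pattern, assuming stock oslo.concurrency; the function bodies and lock names are made up for illustration and are not Nova's implementation.

# Sketch of the lockutils pattern behind the acquired/released DEBUG lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # Runs with the named lock held; lockutils logs the "acquired by" /
    # "released by ... held N.NNNs" lines seen in this trace.
    return {'instance': instance, 'claimed': True}

def refresh_cache(instance_uuid):
    # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here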
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.903230] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 998.908194] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22cf9395-40fa-45bb-8d18-4e8d7c9e830d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.922360] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 998.922360] env[62503]: value = "task-1388106" [ 998.922360] env[62503]: _type = "Task" [ 998.922360] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.929746] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388106, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.202475] env[62503]: DEBUG nova.compute.utils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.203911] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 999.204675] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.266631] env[62503]: DEBUG nova.policy [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '111ad3ca820d4ab0a3ac466a24d9526d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9b69dfe9a9a44188c612fd777341101', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 999.313754] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526f4075-7058-268c-0570-6bd747eee4bf, 'name': SearchDatastore_Task, 'duration_secs': 0.00825} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.314053] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.314322] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 81545f5b-a9ba-4d58-aaca-62a2652a8102/81545f5b-a9ba-4d58-aaca-62a2652a8102.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.314589] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fca6763-721f-48fe-a475-b803035a02e2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.321302] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 999.321302] env[62503]: value = "task-1388107" [ 999.321302] env[62503]: _type = "Task" [ 999.321302] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.328560] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.432022] env[62503]: DEBUG oslo_vmware.api [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388106, 'name': ReconfigVM_Task, 'duration_secs': 0.157874} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.432322] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294612', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'name': 'volume-628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e693bcc2-3883-466d-913c-831146ca81e7', 'attached_at': '', 'detached_at': '', 'volume_id': '628fd04b-dba0-424f-9dcd-1fc8f4074db1', 'serial': '628fd04b-dba0-424f-9dcd-1fc8f4074db1'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 999.502754] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Successfully created port: 767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.708278] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 999.837710] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388107, 'name': CopyVirtualDisk_Task} progress is 0%. 
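The repeated "Waiting for the task", "progress is N%.", and "completed successfully" records come from oslo.vmware's task polling in wait_for_task/_poll_task. The following is a minimal, hypothetical sketch of that polling shape only; the real oslo.vmware implementation uses a looping call and much richer error handling, and the get_task_info callable and its attributes are assumptions modeled on the vSphere TaskInfo structure.

# Hypothetical sketch of a vCenter task poll loop (not oslo.vmware code).
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it succeeds or errors.

    get_task_info is assumed to return an object with .state, .progress
    and .error attributes, mirroring the vSphere TaskInfo structure.
    """
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        # 'queued' or 'running': report progress and try again.
        print('Task progress is %s%%.' % (info.progress or 0))
        time.sleep(poll_interval)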
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.840437] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ec547b-7679-4ae1-b28a-328b26550986 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.849613] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3771e7-6d4f-44ad-b568-c8565e1b8cb3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.897336] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2c1442-c3b5-412d-a962-1e2bd7c2347f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.906913] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f420c5-35a9-4fa1-a1c4-7d371a2909e7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.925691] env[62503]: DEBUG nova.compute.provider_tree [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.973625] env[62503]: DEBUG nova.objects.instance [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid e693bcc2-3883-466d-913c-831146ca81e7 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.333127] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388107, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.430410] env[62503]: DEBUG nova.scheduler.client.report [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1000.719418] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Start spawning the instance on the hypervisor. 
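The scheduler report lines above log the provider inventory for 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. Assuming the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, a quick check of the logged values gives 192 VCPU, 196078 MB of RAM, and 400 GB of disk of schedulable capacity:

# Arithmetic check against the inventory payload logged above,
# assuming capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192
# MEMORY_MB 196078
# DISK_GB 400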
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1000.744130] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.744430] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.744594] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.744781] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.744933] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.745098] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.745316] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.745480] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.745653] env[62503]: DEBUG nova.virt.hardware [None 
req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.745905] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.746130] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.747039] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c11a48b-503b-4b27-a7dc-303d862df621 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.755089] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58247e6e-454b-4797-8bd8-238d08598457 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.832646] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388107, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.167363} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.832915] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 81545f5b-a9ba-4d58-aaca-62a2652a8102/81545f5b-a9ba-4d58-aaca-62a2652a8102.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.833148] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.833400] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4332662-95e3-4e76-86bb-f8c2e82ff9c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.839902] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1000.839902] env[62503]: value = "task-1388108" [ 1000.839902] env[62503]: _type = "Task" [ 1000.839902] env[62503]: } to complete. 
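The nova.virt.hardware records above search for CPU topologies for 1 vCPU under the default limits (65536 sockets/cores/threads) and settle on sockets=1, cores=1, threads=1. A simplified sketch of that factorization is below; it is an illustration only, far cruder than Nova's actual _get_possible_cpu_topologies.

# Simplified sketch: enumerate sockets x cores x threads splits of the
# vCPU count within the limits. For vcpus=1 the only result is (1, 1, 1).
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]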
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.847040] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388108, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.877821] env[62503]: DEBUG nova.compute.manager [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Received event network-vif-plugged-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1000.878076] env[62503]: DEBUG oslo_concurrency.lockutils [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] Acquiring lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.878335] env[62503]: DEBUG oslo_concurrency.lockutils [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.878524] env[62503]: DEBUG oslo_concurrency.lockutils [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.878702] env[62503]: DEBUG nova.compute.manager [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] No waiting events found dispatching network-vif-plugged-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.878896] env[62503]: WARNING nova.compute.manager [req-0f3f4594-d96e-4c67-bb54-35a1cf4a5289 req-c3931149-55fc-4c3b-8f01-3e31c4eac222 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Received unexpected event network-vif-plugged-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf for instance with vm_state building and task_state spawning. [ 1000.935425] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.935958] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1000.938477] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.585s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.965746] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Successfully updated port: 767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.982770] env[62503]: DEBUG oslo_concurrency.lockutils [None req-b8ce09be-2292-4c8e-a90f-ca34b45341f4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.350392] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06393} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.350720] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.351506] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fc284a-ba8b-442e-b99f-18f4964b2143 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.372690] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 81545f5b-a9ba-4d58-aaca-62a2652a8102/81545f5b-a9ba-4d58-aaca-62a2652a8102.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.372927] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-411693b9-4caf-41e2-a806-26cf4de55fa1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.391168] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1001.391168] env[62503]: value = "task-1388109" [ 1001.391168] env[62503]: _type = "Task" [ 1001.391168] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.398622] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388109, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.441413] env[62503]: DEBUG nova.compute.utils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.443051] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1001.443175] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.447851] env[62503]: INFO nova.compute.claims [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.468486] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.468692] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.468796] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.514866] env[62503]: DEBUG nova.policy [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '111ad3ca820d4ab0a3ac466a24d9526d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9b69dfe9a9a44188c612fd777341101', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': 
None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1001.862540] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Successfully created port: c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.901670] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388109, 'name': ReconfigVM_Task, 'duration_secs': 0.261764} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.901849] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 81545f5b-a9ba-4d58-aaca-62a2652a8102/81545f5b-a9ba-4d58-aaca-62a2652a8102.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.902513] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e31c270a-7a66-473a-812b-ea3f2fce22b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.908252] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1001.908252] env[62503]: value = "task-1388110" [ 1001.908252] env[62503]: _type = "Task" [ 1001.908252] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.916122] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388110, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.953458] env[62503]: INFO nova.compute.resource_tracker [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating resource usage from migration b34ab829-d7d5-456a-a7c1-b0389514e668 [ 1001.955989] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1002.001976] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.097174] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fe8fe2-1006-414a-bc97-5775c6c3e47e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.105795] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f4dd54-f93b-4e03-9365-8859c54a5228 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.138245] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff9c91a-afec-4c60-8065-e55d37242368 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.145867] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a77684-dfb5-4f9a-aa98-0d57f94419d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.159013] env[62503]: DEBUG nova.compute.provider_tree [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.171252] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Updating instance_info_cache with network_info: [{"id": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "address": "fa:16:3e:63:68:e6", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767e9e9f-4f", "ovs_interfaceid": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.184080] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.184390] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.184634] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "e693bcc2-3883-466d-913c-831146ca81e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.184852] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.185073] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.187361] env[62503]: INFO nova.compute.manager [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Terminating instance [ 1002.189138] env[62503]: DEBUG nova.compute.manager [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1002.189340] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.190468] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c05f36f-307c-47b5-94ea-70123e306640 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.198186] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.198430] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de96f6ee-198e-4911-9be7-5f8b40030dd0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.204548] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1002.204548] env[62503]: value = "task-1388111" [ 1002.204548] env[62503]: _type = "Task" [ 1002.204548] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.212786] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388111, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.418392] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388110, 'name': Rename_Task, 'duration_secs': 0.340077} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.418602] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.418795] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcc3f872-4ff1-4a7a-9fb0-b32782a3582c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.429976] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1002.429976] env[62503]: value = "task-1388112" [ 1002.429976] env[62503]: _type = "Task" [ 1002.429976] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.438696] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.664193] env[62503]: DEBUG nova.scheduler.client.report [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1002.674865] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.674865] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Instance network_info: |[{"id": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "address": "fa:16:3e:63:68:e6", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767e9e9f-4f", "ovs_interfaceid": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1002.675114] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:68:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '767e9e9f-4fd3-404b-b2e3-3cc22e7511bf', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.682721] env[62503]: DEBUG oslo.service.loopingcall [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.683565] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1002.683809] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e0b1e9a-bc69-4f18-8b44-6fbc53af5c05 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.703805] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.703805] env[62503]: value = "task-1388113" [ 1002.703805] env[62503]: _type = "Task" [ 1002.703805] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.717054] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388113, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.720208] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388111, 'name': PowerOffVM_Task, 'duration_secs': 0.212417} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.720451] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.720618] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.720851] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7496c829-7247-4c53-9cf1-6725e6679c3c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.787454] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.787736] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.788064] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleting the datastore file [datastore2] e693bcc2-3883-466d-913c-831146ca81e7 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.788683] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a050d9cc-0881-49ba-8be1-0bc039034247 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.794637] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1002.794637] env[62503]: value = "task-1388115" [ 1002.794637] env[62503]: _type = "Task" [ 1002.794637] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.802481] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.907451] env[62503]: DEBUG nova.compute.manager [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Received event network-changed-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1002.907736] env[62503]: DEBUG nova.compute.manager [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Refreshing instance network info cache due to event network-changed-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1002.907873] env[62503]: DEBUG oslo_concurrency.lockutils [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] Acquiring lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.908049] env[62503]: DEBUG oslo_concurrency.lockutils [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] Acquired lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.908227] env[62503]: DEBUG nova.network.neutron [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Refreshing network info cache for port 767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.940859] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388112, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.965184] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1002.990545] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.990802] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.990966] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.991199] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.991353] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.991502] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.991727] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.991911] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.992102] env[62503]: DEBUG nova.virt.hardware [None 
req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.992273] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.992451] env[62503]: DEBUG nova.virt.hardware [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.993649] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26e1d62-7dcd-4214-98ec-9a6c919368df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.001167] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b394eebf-e6e0-4435-9e90-150116fde322 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.169433] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.231s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.169849] env[62503]: INFO nova.compute.manager [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Migrating [ 1003.213024] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388113, 'name': CreateVM_Task, 'duration_secs': 0.313188} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.213196] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1003.213850] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.214034] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.214346] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1003.214586] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32c5bc81-860d-449c-a4e3-beaf6444cd3d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.218926] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1003.218926] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a6f46d-74c5-f0bd-d560-e4d1b776f378" [ 1003.218926] env[62503]: _type = "Task" [ 1003.218926] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.225836] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a6f46d-74c5-f0bd-d560-e4d1b776f378, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.304176] env[62503]: DEBUG oslo_vmware.api [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140821} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.304460] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.304644] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.304826] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.305016] env[62503]: INFO nova.compute.manager [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1003.305260] env[62503]: DEBUG oslo.service.loopingcall [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.305458] env[62503]: DEBUG nova.compute.manager [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1003.305548] env[62503]: DEBUG nova.network.neutron [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.337333] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Successfully updated port: c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.442642] env[62503]: DEBUG oslo_vmware.api [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388112, 'name': PowerOnVM_Task, 'duration_secs': 0.520505} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.444915] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.445138] env[62503]: INFO nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Took 8.16 seconds to spawn the instance on the hypervisor. [ 1003.445320] env[62503]: DEBUG nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1003.446097] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9bb004-e26e-41c2-81bf-40d378e58a05 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.608071] env[62503]: DEBUG nova.network.neutron [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Updated VIF entry in instance network info cache for port 767e9e9f-4fd3-404b-b2e3-3cc22e7511bf. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.609010] env[62503]: DEBUG nova.network.neutron [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Updating instance_info_cache with network_info: [{"id": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "address": "fa:16:3e:63:68:e6", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767e9e9f-4f", "ovs_interfaceid": "767e9e9f-4fd3-404b-b2e3-3cc22e7511bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.684165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.684165] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.684339] env[62503]: DEBUG nova.network.neutron [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.728847] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a6f46d-74c5-f0bd-d560-e4d1b776f378, 'name': SearchDatastore_Task, 'duration_secs': 0.201994} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.729158] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.729393] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.729655] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.729815] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.730008] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.730273] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62c186b9-10e8-4fe5-8066-6a1b201d45c2 {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.738158] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.738344] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.739329] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-475a137e-aa64-43a1-95c3-876b37557719 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.743857] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1003.743857] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52de4e92-2943-1e46-b1cc-ab05c3906d7b" [ 1003.743857] env[62503]: _type = "Task" [ 1003.743857] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.751852] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52de4e92-2943-1e46-b1cc-ab05c3906d7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.840484] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.840778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.840778] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.890475] env[62503]: DEBUG nova.compute.manager [req-658aadc7-38e2-4736-ba00-f7b699b98f46 req-9ae8f493-a974-4796-bb8f-5284dd426a81 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Received event network-vif-deleted-71aa781d-4a40-4f00-8fb8-06cb4c73986a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1003.890740] env[62503]: INFO nova.compute.manager [req-658aadc7-38e2-4736-ba00-f7b699b98f46 req-9ae8f493-a974-4796-bb8f-5284dd426a81 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Neutron deleted interface 71aa781d-4a40-4f00-8fb8-06cb4c73986a; detaching it from the instance and deleting it from the info cache [ 1003.890990] env[62503]: DEBUG nova.network.neutron [req-658aadc7-38e2-4736-ba00-f7b699b98f46 req-9ae8f493-a974-4796-bb8f-5284dd426a81 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.966817] env[62503]: INFO nova.compute.manager [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Took 14.00 seconds to build instance. [ 1004.111670] env[62503]: DEBUG oslo_concurrency.lockutils [req-2d0286d3-9db3-4363-a7e9-92596584ec62 req-2e47eab7-43bf-46fd-80d5-17b8a0db7492 service nova] Releasing lock "refresh_cache-c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.253847] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52de4e92-2943-1e46-b1cc-ab05c3906d7b, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.254624] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7ac616-ff55-4e22-be91-f532e692c57d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.259755] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1004.259755] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525f8ab3-3766-8033-7166-7b03be28c9dc" [ 1004.259755] env[62503]: _type = "Task" [ 1004.259755] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.267241] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525f8ab3-3766-8033-7166-7b03be28c9dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.344837] env[62503]: DEBUG nova.network.neutron [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.374523] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.394230] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dec244c-fb16-49fd-98eb-337b2139195b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.400878] env[62503]: DEBUG nova.network.neutron [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.405797] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ffc1a7-6e45-43cf-98eb-5f0894dd622f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.436079] env[62503]: DEBUG nova.compute.manager [req-658aadc7-38e2-4736-ba00-f7b699b98f46 req-9ae8f493-a974-4796-bb8f-5284dd426a81 service nova] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Detach interface failed, port_id=71aa781d-4a40-4f00-8fb8-06cb4c73986a, reason: Instance e693bcc2-3883-466d-913c-831146ca81e7 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1004.469012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d56402ad-0390-4eaa-b039-d48df2ed199a tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.509s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.510770] env[62503]: DEBUG nova.network.neutron [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Updating instance_info_cache with network_info: [{"id": "c58c8243-163d-4f88-b7b2-51ee586765d7", "address": "fa:16:3e:0b:3a:cf", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58c8243-16", "ovs_interfaceid": "c58c8243-163d-4f88-b7b2-51ee586765d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.637390] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.637830] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.637900] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.638068] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.638248] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.640442] env[62503]: INFO nova.compute.manager [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Terminating instance [ 1004.642137] env[62503]: DEBUG nova.compute.manager [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1004.642340] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.643184] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca84a77d-438b-49a6-8879-8ae1308370be {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.651248] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.651496] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf713f0b-1913-4fb3-b3db-2280bbdb77c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.657175] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1004.657175] env[62503]: value = "task-1388116" [ 1004.657175] env[62503]: _type = "Task" [ 1004.657175] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.664325] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388116, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.770155] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525f8ab3-3766-8033-7166-7b03be28c9dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009047} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.770371] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.770626] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c6961bd3-16fa-4476-9d9c-8e91f7c0bee3/c6961bd3-16fa-4476-9d9c-8e91f7c0bee3.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1004.770886] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c23469d-d3e3-467f-a495-443693d9c0a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.777015] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1004.777015] env[62503]: value = "task-1388117" [ 1004.777015] env[62503]: _type = "Task" [ 1004.777015] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.785062] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.847733] env[62503]: INFO nova.compute.manager [-] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Took 1.54 seconds to deallocate network for instance. 
[ 1004.904618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.940620] env[62503]: DEBUG nova.compute.manager [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Received event network-vif-plugged-c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1004.940620] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Acquiring lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.940620] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.940620] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.941181] env[62503]: DEBUG nova.compute.manager [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] No waiting events found dispatching network-vif-plugged-c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1004.941181] env[62503]: WARNING nova.compute.manager [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Received unexpected event network-vif-plugged-c58c8243-163d-4f88-b7b2-51ee586765d7 for instance with vm_state building and task_state spawning. [ 1004.941895] env[62503]: DEBUG nova.compute.manager [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Received event network-changed-c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1004.942223] env[62503]: DEBUG nova.compute.manager [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Refreshing instance network info cache due to event network-changed-c58c8243-163d-4f88-b7b2-51ee586765d7. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1004.942897] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Acquiring lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.013571] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.013980] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Instance network_info: |[{"id": "c58c8243-163d-4f88-b7b2-51ee586765d7", "address": "fa:16:3e:0b:3a:cf", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58c8243-16", "ovs_interfaceid": "c58c8243-163d-4f88-b7b2-51ee586765d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1005.014369] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Acquired lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.014606] env[62503]: DEBUG nova.network.neutron [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Refreshing network info cache for port c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.015953] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:3a:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c58c8243-163d-4f88-b7b2-51ee586765d7', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.023766] env[62503]: DEBUG oslo.service.loopingcall [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.026867] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.027373] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f1fe35b-3159-4fc5-a031-b6e137a7b9ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.048810] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.048810] env[62503]: value = "task-1388118" [ 1005.048810] env[62503]: _type = "Task" [ 1005.048810] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.057336] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388118, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.166880] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388116, 'name': PowerOffVM_Task, 'duration_secs': 0.17684} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.167201] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.167431] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.167744] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b3359d6-a6f0-4e16-b9ef-83bda2248d0d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.231835] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.231835] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.231835] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore1] 81545f5b-a9ba-4d58-aaca-62a2652a8102 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.231835] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6105778b-ace1-4818-b5cf-d8c038f052eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.237321] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1005.237321] env[62503]: value = "task-1388120" [ 1005.237321] env[62503]: _type = "Task" [ 1005.237321] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.246957] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.248125] env[62503]: DEBUG nova.network.neutron [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Updated VIF entry in instance network info cache for port c58c8243-163d-4f88-b7b2-51ee586765d7. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.248125] env[62503]: DEBUG nova.network.neutron [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Updating instance_info_cache with network_info: [{"id": "c58c8243-163d-4f88-b7b2-51ee586765d7", "address": "fa:16:3e:0b:3a:cf", "network": {"id": "7fbaee7b-bc73-4090-b263-cba558fcc93a", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1602059693-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b69dfe9a9a44188c612fd777341101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58c8243-16", "ovs_interfaceid": "c58c8243-163d-4f88-b7b2-51ee586765d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.289937] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.394774} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.290270] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] c6961bd3-16fa-4476-9d9c-8e91f7c0bee3/c6961bd3-16fa-4476-9d9c-8e91f7c0bee3.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1005.290528] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1005.290856] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87054b45-fb68-442a-acd2-9f21208710ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.299674] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1005.299674] env[62503]: value = "task-1388121" [ 1005.299674] env[62503]: _type = "Task" [ 1005.299674] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.309305] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.354923] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.355174] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.355495] env[62503]: DEBUG nova.objects.instance [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'resources' on Instance uuid e693bcc2-3883-466d-913c-831146ca81e7 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.558320] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388118, 'name': CreateVM_Task, 'duration_secs': 0.341866} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.558500] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1005.559112] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.559292] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.559611] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1005.559858] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c315730-f2a5-41fc-8b73-4eed1c0de524 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.563946] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1005.563946] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262702d-49aa-8d2c-37e4-b5be5f623a34" [ 1005.563946] env[62503]: _type = "Task" [ 1005.563946] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.571172] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262702d-49aa-8d2c-37e4-b5be5f623a34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.747554] env[62503]: DEBUG oslo_vmware.api [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15212} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.747808] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1005.747999] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1005.748207] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1005.748385] env[62503]: INFO nova.compute.manager [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1005.748628] env[62503]: DEBUG oslo.service.loopingcall [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.748822] env[62503]: DEBUG nova.compute.manager [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1005.748918] env[62503]: DEBUG nova.network.neutron [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1005.750781] env[62503]: DEBUG oslo_concurrency.lockutils [req-74736f7c-4469-4dcb-b113-781a034b75d0 req-5e07081a-afdf-419f-98ce-d61080c097b0 service nova] Releasing lock "refresh_cache-9a792b9f-51c3-4cef-a3b8-1e81866433ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.810689] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089385} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.810918] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.811699] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b483ed-4522-4705-83c2-5259fb0ee454 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.832589] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] c6961bd3-16fa-4476-9d9c-8e91f7c0bee3/c6961bd3-16fa-4476-9d9c-8e91f7c0bee3.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.832837] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89791bd1-1bbd-48ea-8a69-3939227f35c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.854653] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1005.854653] env[62503]: value = "task-1388122" [ 1005.854653] env[62503]: _type = "Task" [ 1005.854653] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.864660] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388122, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.002194] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475d5b3d-ccb9-41c0-8a8b-38d85727904e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.010160] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9921aca1-670d-46c3-a855-776cd9c76ac5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.040848] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c77ac7-22b2-47d1-8f8e-45f83b3ee88c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.048073] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3449ed-24e0-499f-9b45-4ab4f77a323f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.061086] env[62503]: DEBUG nova.compute.provider_tree [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.073259] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262702d-49aa-8d2c-37e4-b5be5f623a34, 'name': SearchDatastore_Task, 'duration_secs': 0.008456} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.073564] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.073797] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.074045] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.074202] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.074383] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.074630] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8eba1537-b5fc-41c8-b7ea-3df592631fe0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.082480] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.082654] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.083380] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75bbae6b-9044-4d2f-bd72-703b2623b7c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.088352] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1006.088352] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5213d414-ff33-f863-cc34-2f9be5ba9dca" [ 1006.088352] env[62503]: _type = "Task" [ 1006.088352] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.095936] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5213d414-ff33-f863-cc34-2f9be5ba9dca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.364630] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388122, 'name': ReconfigVM_Task, 'duration_secs': 0.260173} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.364750] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Reconfigured VM instance instance-00000062 to attach disk [datastore1] c6961bd3-16fa-4476-9d9c-8e91f7c0bee3/c6961bd3-16fa-4476-9d9c-8e91f7c0bee3.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.365352] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b73c5a97-348c-49e3-a442-4ad44db2d134 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.371909] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1006.371909] env[62503]: value = "task-1388123" [ 1006.371909] env[62503]: _type = "Task" [ 1006.371909] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.379617] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388123, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.417803] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da51f089-27e2-4fe2-9832-499e6f0687f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.437525] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 0 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1006.485830] env[62503]: DEBUG nova.network.neutron [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.564142] env[62503]: DEBUG nova.scheduler.client.report [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1006.600834] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5213d414-ff33-f863-cc34-2f9be5ba9dca, 'name': SearchDatastore_Task, 'duration_secs': 0.008585} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.600834] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc58bc91-4111-4a3e-b780-b0e0a4b53951 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.604047] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1006.604047] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269e81c-9326-5105-97a4-d7f90bd9dc09" [ 1006.604047] env[62503]: _type = "Task" [ 1006.604047] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.611119] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269e81c-9326-5105-97a4-d7f90bd9dc09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.881174] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388123, 'name': Rename_Task, 'duration_secs': 0.135374} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.881478] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.881765] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c0e35e8-f6ba-4598-ac63-2ced78f3cd4e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.888584] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1006.888584] env[62503]: value = "task-1388124" [ 1006.888584] env[62503]: _type = "Task" [ 1006.888584] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.896583] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.943865] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.944251] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81832ed6-1e4f-4f69-b867-5eba2bd968d9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.950573] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1006.950573] env[62503]: value = "task-1388125" [ 1006.950573] env[62503]: _type = "Task" [ 1006.950573] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.958637] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.972131] env[62503]: DEBUG nova.compute.manager [req-0ffa946b-2042-4169-9c7f-9b7ee108718e req-abc5d9b7-8f73-49b3-823f-c79b86606d44 service nova] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Received event network-vif-deleted-a4b23c78-e24e-457c-b40e-f72cc7324c9c {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1006.987718] env[62503]: INFO nova.compute.manager [-] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Took 1.24 seconds to deallocate network for instance. [ 1007.069703] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.092103] env[62503]: INFO nova.scheduler.client.report [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted allocations for instance e693bcc2-3883-466d-913c-831146ca81e7 [ 1007.114589] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269e81c-9326-5105-97a4-d7f90bd9dc09, 'name': SearchDatastore_Task, 'duration_secs': 0.009709} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.114869] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.115158] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 9a792b9f-51c3-4cef-a3b8-1e81866433ce/9a792b9f-51c3-4cef-a3b8-1e81866433ce.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1007.115424] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e00de3e4-3b01-461e-8fd7-cd9c4ecd6731 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.122223] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1007.122223] env[62503]: value = "task-1388126" [ 1007.122223] env[62503]: _type = "Task" [ 1007.122223] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.130385] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.402867] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388124, 'name': PowerOnVM_Task, 'duration_secs': 0.479459} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.403444] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.403856] env[62503]: INFO nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Took 6.68 seconds to spawn the instance on the hypervisor. [ 1007.404267] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1007.405343] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9b2ff5-d638-416d-9445-4dc70103d502 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.465672] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388125, 'name': PowerOffVM_Task, 'duration_secs': 0.192363} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.465672] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.465672] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 17 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1007.493831] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.494021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.494271] env[62503]: DEBUG nova.objects.instance [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid 81545f5b-a9ba-4d58-aaca-62a2652a8102 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.599806] env[62503]: DEBUG oslo_concurrency.lockutils [None req-80a44815-c54b-4af5-897b-7b96ca7cfff5 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "e693bcc2-3883-466d-913c-831146ca81e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.415s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.632681] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4952} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.632966] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 9a792b9f-51c3-4cef-a3b8-1e81866433ce/9a792b9f-51c3-4cef-a3b8-1e81866433ce.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.633244] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.633546] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d3e857b-d2da-4af6-8458-4d9d556333c4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.640010] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1007.640010] env[62503]: value = "task-1388127" [ 1007.640010] env[62503]: _type = "Task" [ 1007.640010] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.647850] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388127, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.928220] env[62503]: INFO nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Took 14.27 seconds to build instance. 
[ 1007.972984] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1007.972984] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1007.972984] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.972984] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1007.973223] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.973268] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1007.973844] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1007.973844] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1007.975041] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1007.975041] 
env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1007.975041] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1007.979915] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c37c91a-2a08-40b4-8051-5be73db0747e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.998588] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1007.998588] env[62503]: value = "task-1388128" [ 1007.998588] env[62503]: _type = "Task" [ 1007.998588] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.007400] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388128, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.114382] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aea5d73-cf3f-497d-9323-af585e8398f1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.121795] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb839896-28b7-4a57-ab4b-f55f7c836a09 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.153735] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4172feba-1623-46ed-b82a-ca76229f8cdf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.160730] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388127, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06275} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.162962] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.163861] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a7d3e4-d11d-4250-8596-8ed85be48f85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.167177] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4c56e5-7d7f-467e-a271-89b09909d1d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.182034] env[62503]: DEBUG nova.compute.provider_tree [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.202171] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 9a792b9f-51c3-4cef-a3b8-1e81866433ce/9a792b9f-51c3-4cef-a3b8-1e81866433ce.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.202710] env[62503]: DEBUG nova.scheduler.client.report [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1008.206522] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-232c72d2-2524-4978-ba2f-4a7a878ac450 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.221325] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.727s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.229256] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: 
(returnval){ [ 1008.229256] env[62503]: value = "task-1388129" [ 1008.229256] env[62503]: _type = "Task" [ 1008.229256] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.238389] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388129, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.248613] env[62503]: INFO nova.scheduler.client.report [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance 81545f5b-a9ba-4d58-aaca-62a2652a8102 [ 1008.429766] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.780s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.509603] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388128, 'name': ReconfigVM_Task, 'duration_secs': 0.150207} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.509974] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 33 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1008.738996] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388129, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.758833] env[62503]: DEBUG oslo_concurrency.lockutils [None req-74ce3633-94b5-4073-82ce-6bd09f280e6e tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "81545f5b-a9ba-4d58-aaca-62a2652a8102" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.120s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.016461] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1009.016711] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1009.016878] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.017087] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1009.017247] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.017402] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1009.017616] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1009.017781] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 
tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1009.017952] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1009.018143] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1009.018349] env[62503]: DEBUG nova.virt.hardware [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1009.023684] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1009.023968] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6531bcad-6709-48ab-865b-cd5039c7eaca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.042545] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1009.042545] env[62503]: value = "task-1388130" [ 1009.042545] env[62503]: _type = "Task" [ 1009.042545] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.050096] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388130, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.166615] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.166973] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.239120] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388129, 'name': ReconfigVM_Task, 'duration_secs': 0.992194} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.239491] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 9a792b9f-51c3-4cef-a3b8-1e81866433ce/9a792b9f-51c3-4cef-a3b8-1e81866433ce.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.240095] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9f26f96-2114-435e-b0ea-1504fb365012 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.245754] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1009.245754] env[62503]: value = "task-1388131" [ 1009.245754] env[62503]: _type = "Task" [ 1009.245754] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.253414] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388131, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.445698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "d811353d-a484-4c15-abfa-3ebbd37816fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.445698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.551858] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388130, 'name': ReconfigVM_Task, 'duration_secs': 0.234185} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.552157] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1009.552921] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8152d21b-e35b-45ed-b1e4-23e751ade906 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.574235] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.574445] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5f874f-7139-4d56-bdf3-7a89c28bd616 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.591990] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1009.591990] env[62503]: value = "task-1388132" [ 1009.591990] env[62503]: _type = "Task" [ 1009.591990] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.599169] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388132, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.669072] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1009.755722] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388131, 'name': Rename_Task, 'duration_secs': 0.149342} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.756688] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.756688] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60225ed8-76d2-46eb-875f-acd5f4c24dc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.763429] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1009.763429] env[62503]: value = "task-1388133" [ 1009.763429] env[62503]: _type = "Task" [ 1009.763429] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.770943] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.949622] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1010.102094] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388132, 'name': ReconfigVM_Task, 'duration_secs': 0.296933} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.102448] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Reconfigured VM instance instance-00000060 to attach disk [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b/c1a41261-03d3-4dde-9b90-68bdec1a548b.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.102768] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 50 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1010.193586] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.193917] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.195564] env[62503]: INFO nova.compute.claims [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.273666] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.468799] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.609474] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58477df-dcbc-4610-ac65-c4a0382d49aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.630306] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57f07cd-0662-47bb-84fc-241ff52af872 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.647354] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 67 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1010.774316] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388133, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.188721] env[62503]: DEBUG nova.network.neutron [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Port 9d56fc29-052f-4ca8-908c-17b026450550 binding to destination host cpu-1 is already ACTIVE {{(pid=62503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1011.275978] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.306788] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba11396-1977-4816-9ea0-038c7e7178e3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.312267] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435c0c22-0691-44b1-bb7c-22f2ddf517de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.341329] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e444b7a-7b10-4f4e-8d71-44d332d08856 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.348249] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de19a031-f9a0-4647-aea8-420a2b2fc863 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.361900] env[62503]: DEBUG nova.compute.provider_tree [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.776492] env[62503]: DEBUG oslo_vmware.api [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388133, 'name': PowerOnVM_Task, 'duration_secs': 1.758821} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.776595] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.776769] env[62503]: INFO nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Took 8.81 seconds to spawn the instance on the hypervisor. 
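The "Waiting for the task: (returnval){ value = "task-…" } to complete", "progress is N%" and "completed successfully" entries above all come from oslo.vmware's task polling (wait_for_task at api.py:397, _poll_task at api.py:434/444): the driver kicks off an asynchronous vSphere task such as ReconfigVM_Task or PowerOnVM_Task and then polls its TaskInfo until it reaches a terminal state. The sketch below is only a schematic of that polling loop, not the actual oslo.vmware code; poll_task_info and the toy task states are made-up stand-ins for the TaskInfo that the real session reads back from vCenter.

    import itertools
    import time


    def wait_for_task(poll_task_info, poll_interval=0.5):
        # Poll a vSphere-style task until it reaches a terminal state.
        # poll_task_info is a stand-in callable returning a dict with
        # 'state' ('queued', 'running', 'success' or 'error') and 'progress'.
        while True:
            info = poll_task_info()
            if info["state"] == "success":
                return info                                       # "completed successfully"
            if info["state"] == "error":
                raise RuntimeError("task failed: %s" % info.get("error"))
            print("progress is %s%%" % info.get("progress", 0))   # "progress is 66%."
            time.sleep(poll_interval)


    # Toy task that reports 0% and 66% before succeeding.
    _states = itertools.chain(
        [{"state": "running", "progress": 0}, {"state": "running", "progress": 66}],
        itertools.repeat({"state": "success", "progress": 100}),
    )
    wait_for_task(lambda: next(_states), poll_interval=0.01)

In the real session the interval comes from the task_poll_interval setting and failures surface as oslo.vmware exceptions rather than a bare RuntimeError.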
[ 1011.776955] env[62503]: DEBUG nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1011.777734] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41320f91-f14c-4d35-be8f-03e43ca25bf8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.864799] env[62503]: DEBUG nova.scheduler.client.report [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1012.212723] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.212723] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.212975] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.294562] env[62503]: INFO nova.compute.manager [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Took 18.61 seconds to build instance. 
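The lock bookkeeping entries around this point ("Acquiring lock "compute_resources" …", "acquired … waited 1.905s", ""released" … held 2.176s") are emitted by the inner wrapper of oslo.concurrency's synchronized decorator (lockutils.py:402/407/421), which Nova uses to serialize resource-tracker work on a per-host named lock. Below is a minimal sketch of that pattern, assuming the oslo.concurrency package from the venv above is importable; the ToyResourceTracker class and its methods are illustrative stand-ins, not Nova's resource tracker.

    from oslo_concurrency import lockutils


    class ToyResourceTracker:
        def __init__(self):
            self.claims = []

        @lockutils.synchronized("compute_resources")
        def instance_claim(self, instance_uuid):
            # Serialized with every other function holding the same named lock,
            # so concurrent builds cannot double-count host resources.
            self.claims.append(instance_uuid)

        @lockutils.synchronized("compute_resources")
        def update_usage(self, instance_uuid):
            # Also waits for the in-process "compute_resources" lock before
            # touching the shared usage data.
            return instance_uuid in self.claims


    tracker = ToyResourceTracker()
    tracker.instance_claim("dd6341e2-cd68-4d12-80e7-51184d448764")
    print(tracker.update_usage("dd6341e2-cd68-4d12-80e7-51184d448764"))  # True

The waited/held durations in the log are simply the time a caller spent blocked on this named lock and the time it held it before the wrapper released it.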
[ 1012.369933] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.176s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.370483] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1012.373800] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.905s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.375211] env[62503]: INFO nova.compute.claims [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.581781] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.582184] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.582438] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.582644] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.582823] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 
tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.584980] env[62503]: INFO nova.compute.manager [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Terminating instance [ 1012.586678] env[62503]: DEBUG nova.compute.manager [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1012.586881] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.587724] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adc1c40-8555-4a32-bb3b-2b872552f4ac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.595133] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.595360] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90512126-c980-4b71-8642-1c68ded67ab9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.601632] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1012.601632] env[62503]: value = "task-1388134" [ 1012.601632] env[62503]: _type = "Task" [ 1012.601632] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.608976] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388134, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.643657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.797501] env[62503]: DEBUG oslo_concurrency.lockutils [None req-48a33a3b-305c-4e0c-ab2b-8658d115689f tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.119s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.797843] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.154s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.798101] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.798351] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.798739] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.800790] env[62503]: INFO nova.compute.manager [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Terminating instance [ 1012.803472] env[62503]: DEBUG nova.compute.manager [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1012.803683] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.804583] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15ac6c4-d0c6-4c7e-bf8a-ec2c4da338f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.812781] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.813053] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d88912c8-d82d-483a-8bbb-c1e5a8ccf76e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.819452] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1012.819452] env[62503]: value = "task-1388135" [ 1012.819452] env[62503]: _type = "Task" [ 1012.819452] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.826629] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.881649] env[62503]: DEBUG nova.compute.utils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.883027] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1012.883204] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.923433] env[62503]: DEBUG nova.policy [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d1fa794892747598a9c0b50bfd82581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12a42517cf8f4ad3836f2f95e8833dd4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1013.111584] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388134, 'name': PowerOffVM_Task, 'duration_secs': 0.234629} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.112456] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.112456] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.112456] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7eddc12a-e092-484a-a103-e98e87b03991 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.199519] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Successfully created port: 705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.261865] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.262207] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] 
Acquired lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.262421] env[62503]: DEBUG nova.network.neutron [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.328048] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388135, 'name': PowerOffVM_Task, 'duration_secs': 0.167866} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.328374] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.328482] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.328893] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9bffc9a-08b5-4f93-b381-4492d6406d54 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.388711] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1013.515303] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c749ab-9cb2-47bb-9ff6-c10b2e30c971 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.523234] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7176f8-52d3-4a76-a346-8c99ff00e4f7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.554351] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a487e9e-1905-4a4c-9e2e-93b950d9577d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.562076] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a3295e-9b5e-437c-b9d5-e3666f1c2030 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.575529] env[62503]: DEBUG nova.compute.provider_tree [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.007189] env[62503]: DEBUG nova.network.neutron [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.078350] env[62503]: DEBUG nova.scheduler.client.report [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1014.405620] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1014.431549] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1014.431915] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1014.432167] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.432402] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1014.433037] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.433037] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1014.433037] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 
tempest-AttachVolumeNegativeTest-1950166676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1014.433255] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1014.433342] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1014.433508] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1014.433684] env[62503]: DEBUG nova.virt.hardware [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1014.434643] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d889aa-203a-4a6b-862a-c2e17aeeee31 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.443398] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0d02a3-62ca-4869-91b3-d4873165c400 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.509747] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.583825] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.584389] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1014.632779] env[62503]: DEBUG nova.compute.manager [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Received event network-vif-plugged-705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1014.633872] env[62503]: DEBUG oslo_concurrency.lockutils [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.633872] env[62503]: DEBUG oslo_concurrency.lockutils [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] Lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.634186] env[62503]: DEBUG oslo_concurrency.lockutils [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] Lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.634520] env[62503]: DEBUG nova.compute.manager [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] No waiting events found dispatching network-vif-plugged-705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.634870] env[62503]: WARNING nova.compute.manager [req-30d887fd-99b7-4e9a-9acd-d92e10a26fb1 req-0ea75a9d-9d09-46f0-ad90-768c3c2fb258 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Received unexpected event network-vif-plugged-705c4161-6bc5-4a66-af1d-c76f64ef1a65 for instance with vm_state building and task_state spawning. 
[ 1014.714708] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Successfully updated port: 705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.038531] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efeaffa1-3fd7-44e3-886d-2b331b031fe8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.057620] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da7c559-e576-48eb-ba81-f64d7f49e76e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.064330] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 83 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1015.090060] env[62503]: DEBUG nova.compute.utils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.091091] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1015.091273] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1015.130594] env[62503]: DEBUG nova.policy [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e8f1583c434155ae657655880ba2c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44139c74b4b349af996a67f408a8441f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1015.173656] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.173888] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.174094] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleting the datastore file [datastore1] c6961bd3-16fa-4476-9d9c-8e91f7c0bee3 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.174369] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe91571c-d8a1-4ef9-aeb7-3ad22455f7cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.182645] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.182880] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.183086] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleting the datastore file [datastore2] 
9a792b9f-51c3-4cef-a3b8-1e81866433ce {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.183738] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11edebed-251a-44e2-b796-832d62c10d6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.186152] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1015.186152] env[62503]: value = "task-1388138" [ 1015.186152] env[62503]: _type = "Task" [ 1015.186152] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.191335] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for the task: (returnval){ [ 1015.191335] env[62503]: value = "task-1388139" [ 1015.191335] env[62503]: _type = "Task" [ 1015.191335] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.197838] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.202675] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388139, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.219581] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.219744] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.219902] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.378066] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Successfully created port: 7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.571190] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.571540] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9da2e815-ef8a-455d-aa8f-ec5c91e1a950 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.579376] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1015.579376] env[62503]: value = "task-1388140" [ 1015.579376] env[62503]: _type = "Task" [ 1015.579376] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.588366] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.594066] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1015.698872] env[62503]: DEBUG oslo_vmware.api [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155464} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.699573] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.699860] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.700253] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.700502] env[62503]: INFO nova.compute.manager [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Took 3.11 seconds to destroy the instance on the hypervisor. [ 1015.700805] env[62503]: DEBUG oslo.service.loopingcall [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1015.703873] env[62503]: DEBUG nova.compute.manager [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1015.703986] env[62503]: DEBUG nova.network.neutron [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1015.705650] env[62503]: DEBUG oslo_vmware.api [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Task: {'id': task-1388139, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14934} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.705893] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.706154] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.706344] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.706517] env[62503]: INFO nova.compute.manager [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Took 2.90 seconds to destroy the instance on the hypervisor. [ 1015.706739] env[62503]: DEBUG oslo.service.loopingcall [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1015.707210] env[62503]: DEBUG nova.compute.manager [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1015.707303] env[62503]: DEBUG nova.network.neutron [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1015.782480] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.979060] env[62503]: DEBUG nova.compute.manager [req-32f2ce9b-0059-41f3-91ed-65cb2bfc6bcf req-5fc20af1-7899-485e-8af4-072260f62fb4 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Received event network-vif-deleted-767e9e9f-4fd3-404b-b2e3-3cc22e7511bf {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1015.979364] env[62503]: INFO nova.compute.manager [req-32f2ce9b-0059-41f3-91ed-65cb2bfc6bcf req-5fc20af1-7899-485e-8af4-072260f62fb4 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Neutron deleted interface 767e9e9f-4fd3-404b-b2e3-3cc22e7511bf; detaching it from the instance and deleting it from the info cache [ 1015.979458] env[62503]: DEBUG nova.network.neutron [req-32f2ce9b-0059-41f3-91ed-65cb2bfc6bcf req-5fc20af1-7899-485e-8af4-072260f62fb4 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.087114] env[62503]: DEBUG nova.network.neutron [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating instance_info_cache with network_info: [{"id": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "address": "fa:16:3e:39:42:0e", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705c4161-6b", "ovs_interfaceid": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.093963] env[62503]: DEBUG oslo_vmware.api [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388140, 'name': PowerOnVM_Task, 'duration_secs': 0.39211} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.094180] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.094374] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9eef127f-bea4-498c-b329-dcc167473ad0 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance 'c1a41261-03d3-4dde-9b90-68bdec1a548b' progress to 100 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1016.449758] env[62503]: DEBUG nova.network.neutron [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.482802] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37dc9db6-2920-4024-8fd4-9b282a25c719 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.492017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3bfef1-2fff-4b2a-be57-9a02e505c348 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.516621] env[62503]: DEBUG nova.compute.manager [req-32f2ce9b-0059-41f3-91ed-65cb2bfc6bcf req-5fc20af1-7899-485e-8af4-072260f62fb4 service nova] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Detach interface failed, port_id=767e9e9f-4fd3-404b-b2e3-3cc22e7511bf, reason: Instance c6961bd3-16fa-4476-9d9c-8e91f7c0bee3 could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1016.589936] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.590294] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Instance network_info: |[{"id": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "address": "fa:16:3e:39:42:0e", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705c4161-6b", "ovs_interfaceid": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1016.590724] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:42:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '705c4161-6bc5-4a66-af1d-c76f64ef1a65', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.598341] env[62503]: DEBUG oslo.service.loopingcall [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.602034] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.603649] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Start spawning the instance on the hypervisor. {{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1016.605609] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f8a0cf4-447b-41fc-a5ad-fe075cc588bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.631763] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.631763] env[62503]: value = "task-1388141" [ 1016.631763] env[62503]: _type = "Task" [ 1016.631763] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.641212] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388141, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.650447] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1016.650714] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1016.650874] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.651116] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1016.651349] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f 
tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.651523] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1016.651738] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1016.651939] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1016.652160] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1016.652366] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1016.652545] env[62503]: DEBUG nova.virt.hardware [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1016.653496] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53391b66-0c6d-44b9-ac6a-f26dd1e43e04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.664552] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab1d56c-714f-4ee7-9912-3f9a6d285dfd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.669923] env[62503]: DEBUG nova.compute.manager [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Received event network-changed-705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1016.670132] env[62503]: DEBUG nova.compute.manager [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Refreshing instance network info cache due to event network-changed-705c4161-6bc5-4a66-af1d-c76f64ef1a65. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1016.670353] env[62503]: DEBUG oslo_concurrency.lockutils [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] Acquiring lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.670500] env[62503]: DEBUG oslo_concurrency.lockutils [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] Acquired lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.670663] env[62503]: DEBUG nova.network.neutron [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Refreshing network info cache for port 705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.696270] env[62503]: DEBUG nova.network.neutron [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.952145] env[62503]: INFO nova.compute.manager [-] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Took 1.25 seconds to deallocate network for instance. [ 1017.142588] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388141, 'name': CreateVM_Task, 'duration_secs': 0.393638} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.142756] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.143463] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.143635] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.143950] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1017.144206] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad364ad2-bc8f-40cf-8f52-60315c6b8d4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.148458] env[62503]: DEBUG oslo_vmware.api 
[None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1017.148458] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b32f2b-77f9-161c-ea6a-e467c1bab8ef" [ 1017.148458] env[62503]: _type = "Task" [ 1017.148458] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.156699] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b32f2b-77f9-161c-ea6a-e467c1bab8ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.199237] env[62503]: INFO nova.compute.manager [-] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Took 1.49 seconds to deallocate network for instance. [ 1017.404845] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Successfully updated port: 7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.458386] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.458605] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.458828] env[62503]: DEBUG nova.objects.instance [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lazy-loading 'resources' on Instance uuid c6961bd3-16fa-4476-9d9c-8e91f7c0bee3 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.522634] env[62503]: DEBUG nova.network.neutron [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updated VIF entry in instance network info cache for port 705c4161-6bc5-4a66-af1d-c76f64ef1a65. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.523021] env[62503]: DEBUG nova.network.neutron [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating instance_info_cache with network_info: [{"id": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "address": "fa:16:3e:39:42:0e", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705c4161-6b", "ovs_interfaceid": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.659291] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b32f2b-77f9-161c-ea6a-e467c1bab8ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.659657] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.659811] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.660055] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.660211] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.660393] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.660674] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d5fe81d-99df-4003-b285-dae5562e44e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.668862] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.669056] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.669730] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3209cc7-0043-4c97-a7cb-a70cc939771f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.675066] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1017.675066] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b37cc1-8128-3004-29ed-a31869a19c94" [ 1017.675066] env[62503]: _type = "Task" [ 1017.675066] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.682454] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b37cc1-8128-3004-29ed-a31869a19c94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.706872] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.871376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.871638] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.871847] env[62503]: DEBUG nova.compute.manager [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Going to confirm migration 2 {{(pid=62503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5079}} [ 1017.907368] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.907527] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 
tempest-ServersTestJSON-1118964903-project-member] Acquired lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.907641] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.027383] env[62503]: DEBUG oslo_concurrency.lockutils [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] Releasing lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.027745] env[62503]: DEBUG nova.compute.manager [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Received event network-vif-deleted-c58c8243-163d-4f88-b7b2-51ee586765d7 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1018.028020] env[62503]: INFO nova.compute.manager [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Neutron deleted interface c58c8243-163d-4f88-b7b2-51ee586765d7; detaching it from the instance and deleting it from the info cache [ 1018.028259] env[62503]: DEBUG nova.network.neutron [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.083810] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f26f08-e4db-402f-b018-7d20087cc796 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.091539] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ee746-942b-499a-8284-0285b531b231 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.125018] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e59c439-1b37-41f5-9c88-d9962325526f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.133309] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730063b9-f27e-4bd2-9011-599bd395f133 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.147584] env[62503]: DEBUG nova.compute.provider_tree [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.186160] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 
tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b37cc1-8128-3004-29ed-a31869a19c94, 'name': SearchDatastore_Task, 'duration_secs': 0.007933} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.186954] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97343b03-9b6a-4666-894b-58974d3a2a4d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.192432] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1018.192432] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286b053-34be-a86a-ab02-424afc96642a" [ 1018.192432] env[62503]: _type = "Task" [ 1018.192432] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.200387] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286b053-34be-a86a-ab02-424afc96642a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.443856] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.444104] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquired lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.444297] env[62503]: DEBUG nova.network.neutron [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.444526] env[62503]: DEBUG nova.objects.instance [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'info_cache' on Instance uuid c1a41261-03d3-4dde-9b90-68bdec1a548b {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.451466] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1018.531457] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb005c50-5c5c-49e9-b472-1f2ddbe23454 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.541064] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df6b791-8ec9-4e9b-9d4d-7da58a5f403e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.570351] env[62503]: DEBUG nova.compute.manager [req-8083c732-b77a-4385-abfd-acb7b70a5bdb req-e9b8186e-4b63-4d16-b148-2cd8408bc125 service nova] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Detach interface failed, port_id=c58c8243-163d-4f88-b7b2-51ee586765d7, reason: Instance 9a792b9f-51c3-4cef-a3b8-1e81866433ce could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1018.650866] env[62503]: DEBUG nova.scheduler.client.report [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1018.654938] env[62503]: DEBUG nova.network.neutron [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Updating instance_info_cache with network_info: [{"id": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "address": "fa:16:3e:20:9b:dc", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b64f1-cc", "ovs_interfaceid": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.687044] env[62503]: DEBUG nova.compute.manager [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: 
d811353d-a484-4c15-abfa-3ebbd37816fc] Received event network-vif-plugged-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1018.687291] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Acquiring lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.687546] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.687666] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.687834] env[62503]: DEBUG nova.compute.manager [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] No waiting events found dispatching network-vif-plugged-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1018.688008] env[62503]: WARNING nova.compute.manager [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Received unexpected event network-vif-plugged-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d for instance with vm_state building and task_state spawning. [ 1018.688179] env[62503]: DEBUG nova.compute.manager [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Received event network-changed-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1018.688331] env[62503]: DEBUG nova.compute.manager [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Refreshing instance network info cache due to event network-changed-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1018.688492] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Acquiring lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.703108] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5286b053-34be-a86a-ab02-424afc96642a, 'name': SearchDatastore_Task, 'duration_secs': 0.009377} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.703386] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.703637] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] dd6341e2-cd68-4d12-80e7-51184d448764/dd6341e2-cd68-4d12-80e7-51184d448764.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.703898] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86da4df2-18e6-4cd4-a159-57c742c4a302 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.710213] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1018.710213] env[62503]: value = "task-1388142" [ 1018.710213] env[62503]: _type = "Task" [ 1018.710213] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.718525] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388142, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.157859] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.160294] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.453s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.160526] env[62503]: DEBUG nova.objects.instance [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lazy-loading 'resources' on Instance uuid 9a792b9f-51c3-4cef-a3b8-1e81866433ce {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.161626] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.161933] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Instance network_info: |[{"id": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "address": "fa:16:3e:20:9b:dc", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b64f1-cc", "ovs_interfaceid": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1019.162411] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Acquired lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.162616] env[62503]: DEBUG 
nova.network.neutron [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Refreshing network info cache for port 7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.163777] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:9b:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c7b64f1-ccd7-4141-a2a8-fe156a1d595d', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1019.171097] env[62503]: DEBUG oslo.service.loopingcall [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1019.174034] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1019.174895] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67de9334-c693-46c9-9239-249ff27874a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.189629] env[62503]: INFO nova.scheduler.client.report [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted allocations for instance c6961bd3-16fa-4476-9d9c-8e91f7c0bee3 [ 1019.195800] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1019.195800] env[62503]: value = "task-1388143" [ 1019.195800] env[62503]: _type = "Task" [ 1019.195800] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.203247] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388143, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.219547] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447761} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.219806] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] dd6341e2-cd68-4d12-80e7-51184d448764/dd6341e2-cd68-4d12-80e7-51184d448764.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.220050] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.220304] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d538c68-36e9-4d2f-acca-6f5eecdf9c7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.228862] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1019.228862] env[62503]: value = "task-1388144" [ 1019.228862] env[62503]: _type = "Task" [ 1019.228862] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.236765] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388144, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.403627] env[62503]: DEBUG nova.network.neutron [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Updated VIF entry in instance network info cache for port 7c7b64f1-ccd7-4141-a2a8-fe156a1d595d. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.404035] env[62503]: DEBUG nova.network.neutron [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Updating instance_info_cache with network_info: [{"id": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "address": "fa:16:3e:20:9b:dc", "network": {"id": "ae12a8a2-c7af-40e0-878a-95b9a17d8356", "bridge": "br-int", "label": "tempest-ServersTestJSON-1776284685-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44139c74b4b349af996a67f408a8441f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b64f1-cc", "ovs_interfaceid": "7c7b64f1-ccd7-4141-a2a8-fe156a1d595d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.667427] env[62503]: DEBUG nova.network.neutron [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [{"id": "9d56fc29-052f-4ca8-908c-17b026450550", "address": "fa:16:3e:db:af:c1", "network": {"id": "fd4d9644-88d7-4c3d-9bee-9e25ca3a7d0b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2096443058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f823912f7b1a4998a6dbc22060cf6c5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d56fc29-05", "ovs_interfaceid": "9d56fc29-052f-4ca8-908c-17b026450550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.699940] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8df7546a-b1d3-40ff-b989-5cdabb388590 tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "c6961bd3-16fa-4476-9d9c-8e91f7c0bee3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.118s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.706260] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388143, 'name': CreateVM_Task, 'duration_secs': 0.32297} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.708428] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.709254] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.709434] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.709743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.709992] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3d54e95-28d2-4166-bc3d-56bdcb5bc8a2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.715093] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1019.715093] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5264a49c-3724-25c0-1bd8-0564ebcf2f6f" [ 1019.715093] env[62503]: _type = "Task" [ 1019.715093] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.731557] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5264a49c-3724-25c0-1bd8-0564ebcf2f6f, 'name': SearchDatastore_Task, 'duration_secs': 0.01163} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.734772] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.735020] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.735255] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.735409] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.735591] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.738061] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91eb1e84-a280-4a7a-b789-2687f095a58d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.744434] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062151} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.744678] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.746217] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b858e8fd-c7a2-4b34-aac5-34461df4d555 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.748495] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.748674] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.751837] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7834547-dcbb-4b00-b1b9-d288c559d15e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.757051] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1019.757051] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528a85ab-5436-b811-c7c3-94b3bcab8c2e" [ 1019.757051] env[62503]: _type = "Task" [ 1019.757051] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.774137] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] dd6341e2-cd68-4d12-80e7-51184d448764/dd6341e2-cd68-4d12-80e7-51184d448764.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.780256] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69aa10ce-2300-49eb-b168-cc5e19769ae4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.800722] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]528a85ab-5436-b811-c7c3-94b3bcab8c2e, 'name': SearchDatastore_Task, 'duration_secs': 0.007874} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.802595] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1019.802595] env[62503]: value = "task-1388145" [ 1019.802595] env[62503]: _type = "Task" [ 1019.802595] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.802805] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6c293fd-54d5-4394-ba5d-0f10abfdd599 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.813180] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1019.813180] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262be55-fdbe-c7a2-a001-47aa87ae2254" [ 1019.813180] env[62503]: _type = "Task" [ 1019.813180] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.816763] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.821593] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4865c94a-741e-4574-96af-1a3c972ba3e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.826887] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262be55-fdbe-c7a2-a001-47aa87ae2254, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.831183] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ddeabd-1a28-4b81-93e7-3ecdcc4d2cd3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.863496] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c289e44-6ca2-43fa-ade7-d55ead38a7c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.876425] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf8dcfe-22bc-4625-ad61-d550e4894733 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.891324] env[62503]: DEBUG nova.compute.provider_tree [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.907091] env[62503]: DEBUG oslo_concurrency.lockutils [req-52a31de4-d226-48d3-a813-66e5e3f55a59 req-a8944106-f2de-4797-b8a9-b00a7c0ef132 service nova] Releasing lock "refresh_cache-d811353d-a484-4c15-abfa-3ebbd37816fc" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.169891] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Releasing lock "refresh_cache-c1a41261-03d3-4dde-9b90-68bdec1a548b" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.170210] env[62503]: DEBUG nova.objects.instance [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lazy-loading 'migration_context' on Instance uuid c1a41261-03d3-4dde-9b90-68bdec1a548b {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.315681] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388145, 'name': ReconfigVM_Task, 'duration_secs': 0.281102} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.315924] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfigured VM instance instance-00000064 to attach disk [datastore2] dd6341e2-cd68-4d12-80e7-51184d448764/dd6341e2-cd68-4d12-80e7-51184d448764.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.316628] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ab889ac-014b-4a90-a8e5-8da9ffe78e23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.326279] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5262be55-fdbe-c7a2-a001-47aa87ae2254, 'name': SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.327338] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.327595] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d811353d-a484-4c15-abfa-3ebbd37816fc/d811353d-a484-4c15-abfa-3ebbd37816fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.327886] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1020.327886] env[62503]: value = "task-1388146" [ 1020.327886] env[62503]: _type = "Task" [ 1020.327886] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.328081] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20052bb3-a7f0-4239-b9f3-5b06715e1eca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.336707] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388146, 'name': Rename_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.337777] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1020.337777] env[62503]: value = "task-1388147" [ 1020.337777] env[62503]: _type = "Task" [ 1020.337777] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.344424] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.370127] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.370425] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.370653] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.370887] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.371124] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.373504] env[62503]: INFO nova.compute.manager [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Terminating instance 
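A recurring pattern in the oslo_vmware.api entries above (and below) is: a vCenter task is created (SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task, ...), then the driver blocks in wait_for_task, logging "progress is N%" on each poll until the task "completed successfully". The snippet below is a minimal, self-contained sketch of that polling loop only; TaskInfo and fake_task_source() are hypothetical stand-ins, not the real oslo.vmware implementation, which polls vCenter's TaskManager.

```python
# Illustrative sketch of the task-polling pattern seen in the log
# ("Waiting for the task ... progress is N% ... completed successfully").
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str      # 'queued' | 'running' | 'success' | 'error'
    progress: int   # 0-100


def fake_task_source():
    """Simulate a vCenter task that finishes after a few polls."""
    yield TaskInfo('running', 0)
    yield TaskInfo('running', 77)
    yield TaskInfo('success', 100)


def wait_for_task(task_infos, poll_interval=0.5):
    """Poll task info until a terminal state is reached, logging progress."""
    for info in task_infos:
        if info.state == 'success':
            print("Task completed successfully.")
            return info
        if info.state == 'error':
            raise RuntimeError("Task failed")
        print(f"Task progress is {info.progress}%.")
        time.sleep(poll_interval)
    raise RuntimeError("Task source exhausted without completing")


if __name__ == "__main__":
    wait_for_task(fake_task_source(), poll_interval=0.01)
```

In the log, each poll corresponds to one `_poll_task` DEBUG line, and the final "completed successfully" line carries the measured `duration_secs`.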
[ 1020.375391] env[62503]: DEBUG nova.compute.manager [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1020.375586] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.376335] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3989447b-4e85-4e33-a50e-1713adba8cad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.383534] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.383754] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78a35fdf-9e29-4725-a12a-d90f8193bcc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.388941] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 1020.388941] env[62503]: value = "task-1388148" [ 1020.388941] env[62503]: _type = "Task" [ 1020.388941] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.393890] env[62503]: DEBUG nova.scheduler.client.report [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1020.399864] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.673124] env[62503]: DEBUG nova.objects.base [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1020.674088] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0615fc1f-72a1-4d0b-b79f-a81d923eb476 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.695513] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9108a38d-40b8-407d-9e75-759a1a29d0d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.701943] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1020.701943] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5261f64e-abe1-a2a6-873c-3f930d4b9b91" [ 1020.701943] env[62503]: _type = "Task" [ 1020.701943] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.710449] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5261f64e-abe1-a2a6-873c-3f930d4b9b91, 'name': SearchDatastore_Task, 'duration_secs': 0.007105} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.710736] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.839443] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388146, 'name': Rename_Task, 'duration_secs': 0.143628} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.842960] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.843296] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da67c2e3-92fe-48f9-8b39-36638d0e9bb8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.849582] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388147, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.852723] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1020.852723] env[62503]: value = "task-1388149" [ 1020.852723] env[62503]: _type = "Task" [ 1020.852723] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.860924] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.899574] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388148, 'name': PowerOffVM_Task, 'duration_secs': 0.190847} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.899845] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.900063] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.900780] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.741s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.902641] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-438e8ea1-1c5a-4ea6-961b-c67120e4b633 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.904322] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.194s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.925163] env[62503]: INFO nova.scheduler.client.report [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Deleted allocations for instance 9a792b9f-51c3-4cef-a3b8-1e81866433ce [ 1020.962492] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.962735] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.962986] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Deleting the datastore file [datastore2] bba6c92b-cac3-4677-a8f4-57a2704fc685 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.963283] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6361b84f-4723-4793-a6c3-f5b4a650a419 
{{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.968953] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for the task: (returnval){ [ 1020.968953] env[62503]: value = "task-1388151" [ 1020.968953] env[62503]: _type = "Task" [ 1020.968953] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.977132] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.349173] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388147, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560866} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.349398] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] d811353d-a484-4c15-abfa-3ebbd37816fc/d811353d-a484-4c15-abfa-3ebbd37816fc.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1021.349615] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1021.349860] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f840dd74-9322-4291-a504-4df7c9d2b197 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.356899] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1021.356899] env[62503]: value = "task-1388152" [ 1021.356899] env[62503]: _type = "Task" [ 1021.356899] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.362670] env[62503]: DEBUG oslo_vmware.api [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388149, 'name': PowerOnVM_Task, 'duration_secs': 0.461814} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.363302] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.363514] env[62503]: INFO nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Took 6.96 seconds to spawn the instance on the hypervisor. [ 1021.363777] env[62503]: DEBUG nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1021.364545] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e98f2f-bf24-40ce-a7ca-feb13f0b5d84 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.369779] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388152, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.431745] env[62503]: DEBUG oslo_concurrency.lockutils [None req-92a5d236-2430-47a1-b059-b485c63a180c tempest-MultipleCreateTestJSON-300413839 tempest-MultipleCreateTestJSON-300413839-project-member] Lock "9a792b9f-51c3-4cef-a3b8-1e81866433ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.634s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.480656] env[62503]: DEBUG oslo_vmware.api [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Task: {'id': task-1388151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187175} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.481171] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.481391] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.481590] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.481761] env[62503]: INFO nova.compute.manager [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1021.482057] env[62503]: DEBUG oslo.service.loopingcall [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.482303] env[62503]: DEBUG nova.compute.manager [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1021.482400] env[62503]: DEBUG nova.network.neutron [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.513542] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f98eb1-d649-4aa8-ace7-602280b5d40f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.520916] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa82d9b-ef50-4dee-8e84-ead51c90adcc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.553339] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec6c900-8368-4387-acc7-abd122679447 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.569738] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1769e740-44b8-4c05-bd99-4677a2b74715 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.583715] env[62503]: DEBUG nova.compute.provider_tree [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.867403] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098321} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.867722] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1021.868418] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4f108c-5b21-407a-a2fb-235a1a637780 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.898660] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] d811353d-a484-4c15-abfa-3ebbd37816fc/d811353d-a484-4c15-abfa-3ebbd37816fc.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1021.900718] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-938c7bb9-ae46-4cce-9d21-ef33596dac7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.916238] env[62503]: INFO nova.compute.manager [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Took 11.74 seconds to build instance. [ 1021.922505] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1021.922505] env[62503]: value = "task-1388153" [ 1021.922505] env[62503]: _type = "Task" [ 1021.922505] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.932370] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388153, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.086335] env[62503]: DEBUG nova.scheduler.client.report [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1022.417677] env[62503]: DEBUG oslo_concurrency.lockutils [None req-f22de092-bce8-4b61-b91c-967b8220a556 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.251s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.434928] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.553789] env[62503]: DEBUG nova.compute.manager [req-c2b00f3a-d046-4c0f-b48a-4c1ea9d61a06 req-46658835-2c7c-4c1d-91c8-b590d1633451 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Received event network-vif-deleted-b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1022.553789] env[62503]: INFO nova.compute.manager [req-c2b00f3a-d046-4c0f-b48a-4c1ea9d61a06 req-46658835-2c7c-4c1d-91c8-b590d1633451 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Neutron deleted interface b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a; detaching it from the instance and deleting it from the info cache [ 1022.553881] env[62503]: DEBUG nova.network.neutron [req-c2b00f3a-d046-4c0f-b48a-4c1ea9d61a06 req-46658835-2c7c-4c1d-91c8-b590d1633451 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.701996] env[62503]: DEBUG nova.network.neutron [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.933295] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388153, 'name': ReconfigVM_Task, 'duration_secs': 0.829659} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.933606] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Reconfigured VM instance instance-00000065 to attach disk [datastore2] d811353d-a484-4c15-abfa-3ebbd37816fc/d811353d-a484-4c15-abfa-3ebbd37816fc.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.934248] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f415441-bcad-4d8a-9da4-3d4702cde60b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.941277] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1022.941277] env[62503]: value = "task-1388154" [ 1022.941277] env[62503]: _type = "Task" [ 1022.941277] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.949384] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388154, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.058225] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ce836ce-0f3e-4d94-886b-beaa54d8859c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.067713] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd8dbfb-7d37-46d3-94c9-3a7fefb33397 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.094241] env[62503]: DEBUG nova.compute.manager [req-c2b00f3a-d046-4c0f-b48a-4c1ea9d61a06 req-46658835-2c7c-4c1d-91c8-b590d1633451 service nova] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Detach interface failed, port_id=b2c7fd20-28f2-42c2-9cdd-d3ad7d14169a, reason: Instance bba6c92b-cac3-4677-a8f4-57a2704fc685 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1023.097823] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.193s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.205630] env[62503]: INFO nova.compute.manager [-] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Took 1.72 seconds to deallocate network for instance. [ 1023.451368] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388154, 'name': Rename_Task, 'duration_secs': 0.134976} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.451694] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.451968] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5752e30-3472-4c97-b16e-b523dd3446ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.458024] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1023.458024] env[62503]: value = "task-1388155" [ 1023.458024] env[62503]: _type = "Task" [ 1023.458024] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.465352] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.672305] env[62503]: INFO nova.scheduler.client.report [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocation for migration b34ab829-d7d5-456a-a7c1-b0389514e668 [ 1023.711705] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.712036] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.712279] env[62503]: DEBUG nova.objects.instance [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lazy-loading 'resources' on Instance uuid bba6c92b-cac3-4677-a8f4-57a2704fc685 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.969598] env[62503]: DEBUG oslo_vmware.api [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388155, 'name': PowerOnVM_Task, 'duration_secs': 0.464111} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.969960] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.970109] env[62503]: INFO nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1023.970265] env[62503]: DEBUG nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1023.971066] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29738456-177a-429c-9282-31d28751803b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.180597] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.309s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.304596] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e527eac-7556-425b-bc52-42b6936b4bc3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.312344] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31471b69-dc75-4bcc-86b2-9ea01b9c4e68 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.343612] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c89418-5e10-48a1-82d6-23b6df37b59d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.352731] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144f058b-1e61-4397-9be3-b3bd71296867 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.367755] env[62503]: DEBUG nova.compute.provider_tree [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.491633] env[62503]: INFO nova.compute.manager [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Took 14.04 seconds to build instance. 
[ 1024.588228] env[62503]: DEBUG nova.compute.manager [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Received event network-changed-705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1024.588756] env[62503]: DEBUG nova.compute.manager [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Refreshing instance network info cache due to event network-changed-705c4161-6bc5-4a66-af1d-c76f64ef1a65. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1024.589069] env[62503]: DEBUG oslo_concurrency.lockutils [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] Acquiring lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.589230] env[62503]: DEBUG oslo_concurrency.lockutils [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] Acquired lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.589398] env[62503]: DEBUG nova.network.neutron [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Refreshing network info cache for port 705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.871684] env[62503]: DEBUG nova.scheduler.client.report [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1024.994244] env[62503]: DEBUG oslo_concurrency.lockutils [None req-9414a127-a077-42ca-aa67-a6c8616f966f tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.548s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.319108] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "d811353d-a484-4c15-abfa-3ebbd37816fc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.319452] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf683985-34c8-4bfe-8a21-6406eee38263 
tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.319618] env[62503]: DEBUG nova.compute.manager [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1025.320971] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0791036-b566-4080-bfe2-9baffc8aa124 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.327846] env[62503]: DEBUG nova.compute.manager [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62503) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3375}} [ 1025.328530] env[62503]: DEBUG nova.objects.instance [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'flavor' on Instance uuid d811353d-a484-4c15-abfa-3ebbd37816fc {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.340635] env[62503]: DEBUG nova.network.neutron [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updated VIF entry in instance network info cache for port 705c4161-6bc5-4a66-af1d-c76f64ef1a65. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.341015] env[62503]: DEBUG nova.network.neutron [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating instance_info_cache with network_info: [{"id": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "address": "fa:16:3e:39:42:0e", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap705c4161-6b", "ovs_interfaceid": "705c4161-6bc5-4a66-af1d-c76f64ef1a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.377139] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.402498] env[62503]: INFO nova.scheduler.client.report [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Deleted allocations for instance bba6c92b-cac3-4677-a8f4-57a2704fc685 [ 1025.403923] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.404429] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.404759] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock 
"c1a41261-03d3-4dde-9b90-68bdec1a548b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.405086] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.405376] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.410766] env[62503]: INFO nova.compute.manager [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Terminating instance [ 1025.413127] env[62503]: DEBUG nova.compute.manager [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1025.413349] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.414608] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01e7abb-0a61-4321-a3b4-d97a34158b89 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.424243] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.424497] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34e8a762-6657-4462-8d69-65835da58681 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.431579] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1025.431579] env[62503]: value = "task-1388156" [ 1025.431579] env[62503]: _type = "Task" [ 1025.431579] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.440532] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.712117] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.712377] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.844564] env[62503]: DEBUG oslo_concurrency.lockutils [req-ba288e11-5882-4cc0-92b4-ba1f76f2a1f7 req-5bbe1d5a-cbdc-490e-965f-5dd1bee2dc82 service nova] Releasing lock "refresh_cache-dd6341e2-cd68-4d12-80e7-51184d448764" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.916443] env[62503]: DEBUG oslo_concurrency.lockutils [None req-97f06f8d-0586-4a0a-97e9-6bf776957567 tempest-ServersV294TestFqdnHostnames-893294453 tempest-ServersV294TestFqdnHostnames-893294453-project-member] Lock "bba6c92b-cac3-4677-a8f4-57a2704fc685" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.546s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.942407] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388156, 'name': PowerOffVM_Task, 'duration_secs': 0.180839} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.942789] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.942969] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.943238] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a831c20-93d0-4ca4-ab47-a0617da67db1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.010560] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.010896] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.010943] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleting the datastore file [datastore1] c1a41261-03d3-4dde-9b90-68bdec1a548b {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.011252] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95e4a775-6e2b-463a-af1a-8d74f9aca8a1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.020176] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for the task: (returnval){ [ 1026.020176] env[62503]: value = "task-1388158" [ 1026.020176] env[62503]: _type = "Task" [ 1026.020176] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.029345] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388158, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.216401] env[62503]: DEBUG nova.compute.utils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1026.336140] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.336542] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c2fded2-1885-4063-9ec1-beecb57fde95 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.346530] env[62503]: DEBUG oslo_vmware.api [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1026.346530] env[62503]: value = "task-1388159" [ 1026.346530] env[62503]: _type = "Task" [ 1026.346530] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.361298] env[62503]: DEBUG oslo_vmware.api [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.426236] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.427186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.531264] env[62503]: DEBUG oslo_vmware.api [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Task: {'id': task-1388158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21543} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.531484] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.531711] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.531908] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.532169] env[62503]: INFO nova.compute.manager [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1026.532428] env[62503]: DEBUG oslo.service.loopingcall [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.532621] env[62503]: DEBUG nova.compute.manager [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1026.532748] env[62503]: DEBUG nova.network.neutron [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1026.720012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.856872] env[62503]: DEBUG oslo_vmware.api [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388159, 'name': PowerOffVM_Task, 'duration_secs': 0.204862} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.857243] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.857453] env[62503]: DEBUG nova.compute.manager [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1026.858541] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e6dedc-d1dc-4d2d-ad61-6ad0a60ac8b6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.898661] env[62503]: DEBUG nova.compute.manager [req-7e7ccebd-644c-41c5-a3e0-e3242c7f804b req-e80fa325-bcc7-4f71-ac8c-ebd6ab7acfb6 service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Received event network-vif-deleted-9d56fc29-052f-4ca8-908c-17b026450550 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1026.898841] env[62503]: INFO nova.compute.manager [req-7e7ccebd-644c-41c5-a3e0-e3242c7f804b req-e80fa325-bcc7-4f71-ac8c-ebd6ab7acfb6 service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Neutron deleted interface 9d56fc29-052f-4ca8-908c-17b026450550; detaching it from the instance and deleting it from the info cache [ 1026.899038] env[62503]: DEBUG nova.network.neutron [req-7e7ccebd-644c-41c5-a3e0-e3242c7f804b req-e80fa325-bcc7-4f71-ac8c-ebd6ab7acfb6 service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.930070] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1027.373974] env[62503]: DEBUG nova.network.neutron [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.378297] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf683985-34c8-4bfe-8a21-6406eee38263 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.401735] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b19dfa94-f34e-491d-b0af-00b826b01d5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.411673] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef9a22f-6a39-4e4e-a5ae-72bfbc8f4e11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.440803] env[62503]: DEBUG nova.compute.manager [req-7e7ccebd-644c-41c5-a3e0-e3242c7f804b req-e80fa325-bcc7-4f71-ac8c-ebd6ab7acfb6 service nova] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Detach interface failed, port_id=9d56fc29-052f-4ca8-908c-17b026450550, reason: Instance c1a41261-03d3-4dde-9b90-68bdec1a548b could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1027.465518] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.465794] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.467383] env[62503]: INFO nova.compute.claims [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.795013] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.795311] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 
tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.795549] env[62503]: INFO nova.compute.manager [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Attaching volume 2e377e60-04b4-4faa-8609-29e7538d02e0 to /dev/sdb [ 1027.828960] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b766f95-6a60-4603-b4d6-6139393e3257 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.837491] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bf8f92-c542-4212-9220-5fe1dea5838e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.852355] env[62503]: DEBUG nova.virt.block_device [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating existing volume attachment record: 7d1408d5-3b17-44c2-9a4d-9c41afa00b0d {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1027.880356] env[62503]: INFO nova.compute.manager [-] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Took 1.35 seconds to deallocate network for instance. [ 1028.138021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "d811353d-a484-4c15-abfa-3ebbd37816fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.138021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.138021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.138021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.138021] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.139849] env[62503]: INFO nova.compute.manager [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Terminating instance [ 1028.142211] env[62503]: DEBUG nova.compute.manager [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1028.142414] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.143283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98273b52-b6f5-4cb9-abe9-5da4dd3ea136 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.151938] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.151938] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d45e2b22-f231-4bb6-a5ba-ff79ca8f8cfa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.222880] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.223319] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.223574] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleting the datastore file [datastore2] d811353d-a484-4c15-abfa-3ebbd37816fc {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.223907] env[62503]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad9f755e-a153-44f2-ab4f-d297754898e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.231332] env[62503]: DEBUG oslo_vmware.api [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for the task: (returnval){ [ 1028.231332] env[62503]: value = "task-1388164" [ 1028.231332] env[62503]: _type = "Task" [ 1028.231332] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.239470] env[62503]: DEBUG oslo_vmware.api [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.388083] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.570499] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151dd27e-88f3-424b-9c59-400e7a4e7afe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.578140] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496df33c-3419-4470-ba4e-45f5c1609682 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.609981] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dd0123-0c9b-4628-994e-f0124649a6a0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.617655] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95823c1-88cc-4275-8ca4-267175771f4d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.631069] env[62503]: DEBUG nova.compute.provider_tree [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.741467] env[62503]: DEBUG oslo_vmware.api [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Task: {'id': task-1388164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150499} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.741770] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.741964] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.742229] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.742416] env[62503]: INFO nova.compute.manager [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1028.742663] env[62503]: DEBUG oslo.service.loopingcall [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.742859] env[62503]: DEBUG nova.compute.manager [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1028.742953] env[62503]: DEBUG nova.network.neutron [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.126811] env[62503]: DEBUG nova.compute.manager [req-59553369-7a81-4828-b22d-d1d1e0686b38 req-2a7afe82-ca77-45cd-b7a8-b31f9c87b81b service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Received event network-vif-deleted-7c7b64f1-ccd7-4141-a2a8-fe156a1d595d {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1029.127031] env[62503]: INFO nova.compute.manager [req-59553369-7a81-4828-b22d-d1d1e0686b38 req-2a7afe82-ca77-45cd-b7a8-b31f9c87b81b service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Neutron deleted interface 7c7b64f1-ccd7-4141-a2a8-fe156a1d595d; detaching it from the instance and deleting it from the info cache [ 1029.128837] env[62503]: DEBUG nova.network.neutron [req-59553369-7a81-4828-b22d-d1d1e0686b38 req-2a7afe82-ca77-45cd-b7a8-b31f9c87b81b service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.134765] env[62503]: DEBUG nova.scheduler.client.report [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Inventory has not changed 
for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1029.606424] env[62503]: DEBUG nova.network.neutron [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.629738] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a6aa9d0-cddd-4a87-a082-efba585eb8af {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.640143] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bc19e9-7943-451b-bafe-e986c1d3c67d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.651089] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.651759] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1029.654903] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.267s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.655268] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.675896] env[62503]: DEBUG nova.compute.manager [req-59553369-7a81-4828-b22d-d1d1e0686b38 req-2a7afe82-ca77-45cd-b7a8-b31f9c87b81b service nova] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Detach interface failed, port_id=7c7b64f1-ccd7-4141-a2a8-fe156a1d595d, reason: Instance d811353d-a484-4c15-abfa-3ebbd37816fc could not be found. 
{{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1029.684106] env[62503]: INFO nova.scheduler.client.report [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Deleted allocations for instance c1a41261-03d3-4dde-9b90-68bdec1a548b [ 1030.110057] env[62503]: INFO nova.compute.manager [-] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Took 1.37 seconds to deallocate network for instance. [ 1030.158018] env[62503]: DEBUG nova.compute.utils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.158018] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1030.158018] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.196015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4730124b-f040-4789-afea-1f1126d5d631 tempest-DeleteServersTestJSON-877433316 tempest-DeleteServersTestJSON-877433316-project-member] Lock "c1a41261-03d3-4dde-9b90-68bdec1a548b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.787s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.216948] env[62503]: DEBUG nova.policy [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd653a0929b14d328f86a14bec7b0beb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c130d7bdc38a4a98ba556abf5910cc5c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1030.615111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.615111] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s 
{{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.615445] env[62503]: DEBUG nova.objects.instance [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lazy-loading 'resources' on Instance uuid d811353d-a484-4c15-abfa-3ebbd37816fc {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.661220] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1030.760098] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Successfully created port: 1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.232018] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3564d454-c92e-4b23-a0bd-3d82c39749cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.244926] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5691ad4d-19f7-4dcb-9669-e532fc4cc440 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.287017] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eba66ed-7a7c-4153-a63a-aee61c62eb6d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.293416] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d890904f-c003-4f9c-8679-5c21ec759dab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.312030] env[62503]: DEBUG nova.compute.provider_tree [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.677921] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1031.701262] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1031.701714] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1031.702103] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1031.702431] env[62503]: DEBUG nova.virt.hardware [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.705022] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c83b9-2c2b-4ac8-8935-88445689f449 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.712429] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd6d4d6-2bc4-45a9-9e18-d956f0a9b250 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.756036] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.757295] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.820027] env[62503]: DEBUG nova.scheduler.client.report [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1032.128244] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d 
tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.128244] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.259429] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1032.267769] env[62503]: DEBUG nova.compute.manager [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Received event network-vif-plugged-1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1032.267769] env[62503]: DEBUG oslo_concurrency.lockutils [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] Acquiring lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.268663] env[62503]: DEBUG oslo_concurrency.lockutils [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.269105] env[62503]: DEBUG oslo_concurrency.lockutils [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.269308] env[62503]: DEBUG nova.compute.manager [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] No waiting events found dispatching network-vif-plugged-1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.269793] env[62503]: WARNING nova.compute.manager [req-e185b8a3-676c-4aab-b529-b182d69e5534 req-d4155625-1fcc-4aaa-99cc-f51ac8f5d804 service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Received unexpected event network-vif-plugged-1cdefdaf-8339-4d4f-a744-b28fbeb535db for instance with vm_state building and task_state spawning. 
[ 1032.329099] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.358114] env[62503]: INFO nova.scheduler.client.report [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Deleted allocations for instance d811353d-a484-4c15-abfa-3ebbd37816fc [ 1032.379879] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Successfully updated port: 1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.409412] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Volume attach. Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1032.409701] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1032.410615] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec25116-f052-4b47-b152-ca5b00a84340 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.430932] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8873c923-01e3-4d3b-a5d4-bd935f482067 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.459397] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.459707] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0364960e-22d8-4b90-9b4a-92f40e89ec7e 
{{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.479360] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1032.479360] env[62503]: value = "task-1388166" [ 1032.479360] env[62503]: _type = "Task" [ 1032.479360] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.487138] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.633539] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1032.779675] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.779956] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.781563] env[62503]: INFO nova.compute.claims [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.865743] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ede16f5c-a406-412b-b3cc-4c2aaf0638b3 tempest-ServersTestJSON-1118964903 tempest-ServersTestJSON-1118964903-project-member] Lock "d811353d-a484-4c15-abfa-3ebbd37816fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.729s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.884196] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.884381] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 
tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquired lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.884539] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.989360] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388166, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.154727] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.429626] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.489769] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388166, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.566561] env[62503]: DEBUG nova.network.neutron [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Updating instance_info_cache with network_info: [{"id": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "address": "fa:16:3e:b1:26:4c", "network": {"id": "46af8e9e-0195-4b5b-ab46-e7fa04f9ecb2", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-628641463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c130d7bdc38a4a98ba556abf5910cc5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cdefdaf-83", "ovs_interfaceid": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.878210] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7783278-6681-4fac-8d4a-cf3117f58532 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.885719] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c847a00e-d277-496b-8f8f-6b67d7cceadc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.917080] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7021238-49b2-47b6-add9-d67a44ebb2c6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.924794] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce63260-0fcd-4048-8695-eecb3c328224 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.937876] env[62503]: DEBUG nova.compute.provider_tree [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.990510] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388166, 'name': ReconfigVM_Task, 'duration_secs': 1.318135} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.990811] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.995547] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdbf8325-53ed-4e0e-a738-a99b1305dc0e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.011028] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1034.011028] env[62503]: value = "task-1388167" [ 1034.011028] env[62503]: _type = "Task" [ 1034.011028] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.019612] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388167, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.069462] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Releasing lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.069881] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Instance network_info: |[{"id": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "address": "fa:16:3e:b1:26:4c", "network": {"id": "46af8e9e-0195-4b5b-ab46-e7fa04f9ecb2", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-628641463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c130d7bdc38a4a98ba556abf5910cc5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cdefdaf-83", "ovs_interfaceid": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1034.070417] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:26:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cdefdaf-8339-4d4f-a744-b28fbeb535db', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.077896] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Creating folder: Project (c130d7bdc38a4a98ba556abf5910cc5c). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.078188] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1840e1f7-104e-4fc8-bdbe-cbbf549d66a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.089508] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Created folder: Project (c130d7bdc38a4a98ba556abf5910cc5c) in parent group-v294540. [ 1034.089744] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Creating folder: Instances. Parent ref: group-v294643. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.090024] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e3b24e4-781e-46b9-a1b3-95ddedecaa78 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.099752] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Created folder: Instances in parent group-v294643. [ 1034.100013] env[62503]: DEBUG oslo.service.loopingcall [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.100284] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.100499] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb2e2f08-c8ac-4814-a5fd-d13c203f34ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.119762] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.119762] env[62503]: value = "task-1388170" [ 1034.119762] env[62503]: _type = "Task" [ 1034.119762] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.127607] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388170, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.307331] env[62503]: DEBUG nova.compute.manager [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Received event network-changed-1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1034.307331] env[62503]: DEBUG nova.compute.manager [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Refreshing instance network info cache due to event network-changed-1cdefdaf-8339-4d4f-a744-b28fbeb535db. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1034.307331] env[62503]: DEBUG oslo_concurrency.lockutils [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] Acquiring lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.307331] env[62503]: DEBUG oslo_concurrency.lockutils [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] Acquired lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.307331] env[62503]: DEBUG nova.network.neutron [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Refreshing network info cache for port 1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.441308] env[62503]: DEBUG nova.scheduler.client.report [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1034.520677] env[62503]: DEBUG oslo_vmware.api [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388167, 'name': ReconfigVM_Task, 'duration_secs': 0.140677} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.521154] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1034.629972] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388170, 'name': CreateVM_Task, 'duration_secs': 0.420567} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.630326] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.631219] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.631462] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.631879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.632216] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5195cfe-a436-4922-b35c-564446001931 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.636878] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1034.636878] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290eb93-1eaa-a9ec-6806-b7bd00f633b6" [ 1034.636878] env[62503]: _type = "Task" [ 1034.636878] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.644255] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290eb93-1eaa-a9ec-6806-b7bd00f633b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.947411] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.947779] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1034.950960] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.796s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.952063] env[62503]: INFO nova.compute.claims [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1035.038102] env[62503]: DEBUG nova.network.neutron [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Updated VIF entry in instance network info cache for port 1cdefdaf-8339-4d4f-a744-b28fbeb535db. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.038497] env[62503]: DEBUG nova.network.neutron [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Updating instance_info_cache with network_info: [{"id": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "address": "fa:16:3e:b1:26:4c", "network": {"id": "46af8e9e-0195-4b5b-ab46-e7fa04f9ecb2", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-628641463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c130d7bdc38a4a98ba556abf5910cc5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cdefdaf-83", "ovs_interfaceid": "1cdefdaf-8339-4d4f-a744-b28fbeb535db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.146790] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5290eb93-1eaa-a9ec-6806-b7bd00f633b6, 'name': SearchDatastore_Task, 'duration_secs': 0.00848} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.147107] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.147377] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.147626] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.147778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.147959] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.148235] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d243b42e-41dd-486f-8e37-5ecd221bbd68 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.156287] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.156472] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.157151] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b72a00-92ac-48de-964f-8ddbe5d0c2d1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.162406] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1035.162406] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5277f24a-be31-7e3b-83e0-b728d9051c54" [ 1035.162406] env[62503]: _type = "Task" [ 1035.162406] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.169842] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5277f24a-be31-7e3b-83e0-b728d9051c54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.457032] env[62503]: DEBUG nova.compute.utils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.460117] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1035.460285] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.499418] env[62503]: DEBUG nova.policy [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef723806cc714bf7a98b659c4343a094', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b86eee9480274a9196fc8ccd920671f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1035.541741] env[62503]: DEBUG oslo_concurrency.lockutils [req-c85e9146-d3a3-4186-bc4a-9d55d588c94b req-efef8964-adc4-40d8-b314-3e8325cd781f service nova] Releasing lock "refresh_cache-5fb35ae6-bfc6-4039-aa43-de8c550aacde" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.559830] env[62503]: DEBUG nova.objects.instance [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'flavor' on Instance uuid ccc542a3-ff01-42ca-965e-706bed4c6e07 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.675946] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5277f24a-be31-7e3b-83e0-b728d9051c54, 'name': SearchDatastore_Task, 'duration_secs': 0.007889} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.675946] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f971e83-e2a4-482d-b358-817b656eb6bb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.681891] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1035.681891] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a3055e-4473-76de-8f49-cf8f6d1b707c" [ 1035.681891] env[62503]: _type = "Task" [ 1035.681891] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.690631] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a3055e-4473-76de-8f49-cf8f6d1b707c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.830767] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Successfully created port: e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.961627] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1036.065444] env[62503]: DEBUG oslo_concurrency.lockutils [None req-6460431f-a5cb-4166-b8d8-45fcdc14b1d7 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.270s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.076276] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eb5ea0-074e-4348-83e7-95bc279f917b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.085360] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec995ff-2fd8-4b4f-8135-002dc7cc72de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.115925] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c295e736-3e48-4552-ada2-ddedbbce8b80 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.123534] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9cd9dd-ba8a-4131-8fc3-8a2f24de243d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.138109] env[62503]: DEBUG nova.compute.provider_tree [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.192322] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': 
session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a3055e-4473-76de-8f49-cf8f6d1b707c, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.192600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.192845] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 5fb35ae6-bfc6-4039-aa43-de8c550aacde/5fb35ae6-bfc6-4039-aa43-de8c550aacde.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.193156] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8161a59-4d1a-44c6-91c7-50fb035b621c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.198933] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1036.198933] env[62503]: value = "task-1388171" [ 1036.198933] env[62503]: _type = "Task" [ 1036.198933] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.206536] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388171, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.551825] env[62503]: INFO nova.compute.manager [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Rebuilding instance [ 1036.593669] env[62503]: DEBUG nova.compute.manager [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1036.594605] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4a1e96-1af9-4890-a5fa-9ce0a6b95aae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.640798] env[62503]: DEBUG nova.scheduler.client.report [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1036.708334] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461451} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.708585] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 5fb35ae6-bfc6-4039-aa43-de8c550aacde/5fb35ae6-bfc6-4039-aa43-de8c550aacde.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.708799] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.709049] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53133df1-35c7-4693-8672-b9f7ef22eb1a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.714447] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1036.714447] env[62503]: value = "task-1388172" [ 1036.714447] env[62503]: _type = "Task" [ 1036.714447] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.722326] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388172, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.971410] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1037.019418] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1037.019622] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1037.019762] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.019975] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1037.020179] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.020342] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1037.020607] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1037.020792] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1037.020969] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1037.021149] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1037.021328] env[62503]: DEBUG nova.virt.hardware [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1037.022242] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9be87a5-63c0-47f3-81ad-b750cf3ddaac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.032400] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e19298-1609-4caa-9c70-514d9d81ac6f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.145642] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.146242] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1037.223890] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059799} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.224190] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.224953] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3338aae-76e0-4db6-a9f8-148c6eb4cd6d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.248018] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 5fb35ae6-bfc6-4039-aa43-de8c550aacde/5fb35ae6-bfc6-4039-aa43-de8c550aacde.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.248018] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a34c04f2-c71b-47b8-bf33-3a692f2702d0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.266124] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1037.266124] env[62503]: value = "task-1388173" [ 1037.266124] env[62503]: _type = "Task" [ 1037.266124] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.273274] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388173, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.287827] env[62503]: DEBUG nova.compute.manager [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Received event network-vif-plugged-e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1037.288553] env[62503]: DEBUG oslo_concurrency.lockutils [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] Acquiring lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.288553] env[62503]: DEBUG oslo_concurrency.lockutils [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.288553] env[62503]: DEBUG oslo_concurrency.lockutils [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.289119] env[62503]: DEBUG nova.compute.manager [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] No waiting events found dispatching network-vif-plugged-e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1037.289119] env[62503]: WARNING nova.compute.manager [req-f03e5b1a-cb6a-4513-a6dd-896ef1a97594 req-31bb5255-7563-4be9-8648-819ce26d9cfa service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Received unexpected event network-vif-plugged-e0d3c642-f374-431c-95ca-9211403e44c6 for instance with vm_state building and task_state spawning. 
[ 1037.386014] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Successfully updated port: e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.611274] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.611555] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f66308cd-cc01-470c-8396-e08e31ed04aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.618013] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1037.618013] env[62503]: value = "task-1388174" [ 1037.618013] env[62503]: _type = "Task" [ 1037.618013] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.626466] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.651283] env[62503]: DEBUG nova.compute.utils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1037.652931] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1037.653200] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.692335] env[62503]: DEBUG nova.policy [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef723806cc714bf7a98b659c4343a094', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b86eee9480274a9196fc8ccd920671f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1037.776985] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388173, 'name': ReconfigVM_Task, 'duration_secs': 0.299666} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.777371] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 5fb35ae6-bfc6-4039-aa43-de8c550aacde/5fb35ae6-bfc6-4039-aa43-de8c550aacde.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.778043] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcc5bf99-2ed5-413d-9949-e5f47b1796b9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.784976] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1037.784976] env[62503]: value = "task-1388175" [ 1037.784976] env[62503]: _type = "Task" [ 1037.784976] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.795642] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388175, 'name': Rename_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.888717] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.888879] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.889070] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.944287] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Successfully created port: b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.128466] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388174, 'name': PowerOffVM_Task, 'duration_secs': 0.192165} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.128743] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.156121] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1038.184893] env[62503]: INFO nova.compute.manager [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Detaching volume 2e377e60-04b4-4faa-8609-29e7538d02e0 [ 1038.216896] env[62503]: INFO nova.virt.block_device [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Attempting to driver detach volume 2e377e60-04b4-4faa-8609-29e7538d02e0 from mountpoint /dev/sdb [ 1038.217151] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Volume detach. Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1038.217351] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1038.218252] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6376be59-66e3-4310-ac5b-7d2d2c029d5d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.240881] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99cb8088-afcb-4e67-8ed4-135e4f9659d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.248772] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82678f24-caf2-4ca3-82ee-f603c2e3aa0c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.267948] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019cb02d-3527-47eb-8462-8c3ff9378eb6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.284969] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] The volume has not been displaced from its original location: [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1038.290939] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1038.291269] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-564587a9-da96-426a-ba44-ea381c9c7cb2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.312250] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388175, 'name': Rename_Task, 'duration_secs': 0.262468} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.313649] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.313958] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1038.313958] env[62503]: value = "task-1388176" [ 1038.313958] env[62503]: _type = "Task" [ 1038.313958] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.314149] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca968ab0-9a21-4dba-be95-07187b08c495 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.324359] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388176, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.325551] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1038.325551] env[62503]: value = "task-1388177" [ 1038.325551] env[62503]: _type = "Task" [ 1038.325551] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.332585] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388177, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.426233] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.563470] env[62503]: DEBUG nova.network.neutron [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.828244] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388176, 'name': ReconfigVM_Task, 'duration_secs': 0.191823} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.831475] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1038.836350] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-986cdfcc-c4dc-42d9-89e8-e4e71dc6cfe8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.851416] env[62503]: DEBUG oslo_vmware.api [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388177, 'name': PowerOnVM_Task, 'duration_secs': 0.510614} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.852617] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.852786] env[62503]: INFO nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Took 7.18 seconds to spawn the instance on the hypervisor. [ 1038.852971] env[62503]: DEBUG nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1038.853300] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1038.853300] env[62503]: value = "task-1388178" [ 1038.853300] env[62503]: _type = "Task" [ 1038.853300] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.853993] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317f25b6-7aab-4e48-bba6-c5fe463d7698 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.867148] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388178, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.066446] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.066794] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Instance network_info: |[{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1039.067281] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:55:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0d3c642-f374-431c-95ca-9211403e44c6', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.074924] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating folder: Project (b86eee9480274a9196fc8ccd920671f0). Parent ref: group-v294540. 
{{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.075219] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3352e3cb-32ba-4b50-bcb9-66be66e3b2fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.086325] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created folder: Project (b86eee9480274a9196fc8ccd920671f0) in parent group-v294540. [ 1039.086513] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating folder: Instances. Parent ref: group-v294646. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.086785] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1372864a-00c0-4229-9bc2-0b479a61572c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.094662] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created folder: Instances in parent group-v294646. [ 1039.094887] env[62503]: DEBUG oslo.service.loopingcall [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.095088] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.095288] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-574cd61e-ca07-4bbf-9e90-8e6bb41e8fe3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.114037] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.114037] env[62503]: value = "task-1388181" [ 1039.114037] env[62503]: _type = "Task" [ 1039.114037] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.121262] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388181, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.165268] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1039.191324] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1039.191615] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1039.192306] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.192306] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1039.192306] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.192510] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1039.192623] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1039.192814] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1039.193024] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1039.193209] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1039.193388] env[62503]: DEBUG nova.virt.hardware [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.194290] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6e68b3-8ef0-4779-95df-833af7d2b682 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.202501] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df027fb1-3f76-4e30-a1fa-e3e0d7e89c10 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.314486] env[62503]: DEBUG nova.compute.manager [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Received event network-changed-e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1039.314734] env[62503]: DEBUG nova.compute.manager [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Refreshing instance network info cache due to event network-changed-e0d3c642-f374-431c-95ca-9211403e44c6. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1039.314916] env[62503]: DEBUG oslo_concurrency.lockutils [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] Acquiring lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.315216] env[62503]: DEBUG oslo_concurrency.lockutils [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] Acquired lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.315417] env[62503]: DEBUG nova.network.neutron [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Refreshing network info cache for port e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.366821] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388178, 'name': ReconfigVM_Task, 'duration_secs': 0.16418} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.367205] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1039.376130] env[62503]: INFO nova.compute.manager [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Took 11.94 seconds to build instance. [ 1039.608583] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Successfully updated port: b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.624310] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388181, 'name': CreateVM_Task, 'duration_secs': 0.340143} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.624625] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.625412] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.625412] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.627642] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1039.627642] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ddd2850-a849-4c79-bcf7-5a6fdcffbfbf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.630586] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1039.630586] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523482b3-c8f3-ac87-e435-5a38eb696508" [ 1039.630586] env[62503]: _type = "Task" [ 1039.630586] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.638999] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523482b3-c8f3-ac87-e435-5a38eb696508, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.878275] env[62503]: DEBUG oslo_concurrency.lockutils [None req-dae72632-7a91-47ab-a7d6-43626c24f7e1 tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.452s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.033342] env[62503]: DEBUG nova.network.neutron [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updated VIF entry in instance network info cache for port e0d3c642-f374-431c-95ca-9211403e44c6. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.033715] env[62503]: DEBUG nova.network.neutron [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.071445] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.071702] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.071916] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.072152] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.072334] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.074365] env[62503]: INFO nova.compute.manager [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Terminating instance [ 1040.076015] env[62503]: DEBUG nova.compute.manager [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1040.076234] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.077257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ea5dc0-2b9e-4b4b-9dc8-21c8a7e503f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.084674] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.084899] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a58d204-d3f9-40c6-8975-b51f8eecfc57 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.091154] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1040.091154] env[62503]: value = "task-1388182" [ 1040.091154] env[62503]: _type = "Task" [ 1040.091154] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.098325] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388182, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.110619] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.110856] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.111176] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.140948] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]523482b3-c8f3-ac87-e435-5a38eb696508, 'name': SearchDatastore_Task, 'duration_secs': 0.010129} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.141257] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.141499] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.141734] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.141888] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.142147] env[62503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.142484] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-584cce1f-cf11-48a6-b468-5bcf8aa196a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.150538] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.150719] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.151421] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4fa2fa6-d1d9-438c-880a-9424f5a02a7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.156860] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1040.156860] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275eeb6-3db0-8736-2f6b-52407f310537" [ 1040.156860] env[62503]: _type = "Task" [ 1040.156860] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.164542] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275eeb6-3db0-8736-2f6b-52407f310537, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.420361] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.420726] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-298f9d35-f024-45ae-9d8f-e7e92bdef04f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.428448] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1040.428448] env[62503]: value = "task-1388183" [ 1040.428448] env[62503]: _type = "Task" [ 1040.428448] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.438401] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1040.438603] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Volume detach. 
Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1040.438797] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1040.439493] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18948ebc-5954-4733-9857-adc4815306e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.457157] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac02989-cb9b-4d5c-8f78-475e8badc230 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.464773] env[62503]: WARNING nova.virt.vmwareapi.driver [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1040.465051] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.465734] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eff5af-170a-4037-81d7-332d680a5586 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.471739] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.471956] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0aa4db2e-12c9-4141-b84b-49757924a1d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.534066] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.534066] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f 
tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.534308] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore1] ccc542a3-ff01-42ca-965e-706bed4c6e07 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.534442] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-584bb242-2e7b-44cf-86f6-0a0a86b6ee82 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.536691] env[62503]: DEBUG oslo_concurrency.lockutils [req-9bf21d32-f640-4aff-9775-fdbbdf08fd03 req-7de3f8e2-2dc6-47c4-8e8f-e7d31437efdc service nova] Releasing lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.541942] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1040.541942] env[62503]: value = "task-1388185" [ 1040.541942] env[62503]: _type = "Task" [ 1040.541942] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.549236] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.600828] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388182, 'name': PowerOffVM_Task, 'duration_secs': 0.194126} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.601113] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.601336] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.601610] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a59652b4-73ee-4c09-b894-1d8d1d391850 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.641856] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.664042] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.664042] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.664042] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Deleting the datastore file [datastore2] 5fb35ae6-bfc6-4039-aa43-de8c550aacde {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.664636] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-328046e2-d064-4d78-be05-26df31bd4bd7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.669480] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275eeb6-3db0-8736-2f6b-52407f310537, 'name': SearchDatastore_Task, 'duration_secs': 0.009425} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.670566] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61101182-7de6-4841-acc2-185a1840fbec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.673663] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for the task: (returnval){ [ 1040.673663] env[62503]: value = "task-1388187" [ 1040.673663] env[62503]: _type = "Task" [ 1040.673663] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.677898] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1040.677898] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b9f089-60b0-8652-eeaa-8864065fcf8b" [ 1040.677898] env[62503]: _type = "Task" [ 1040.677898] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.683692] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.690376] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b9f089-60b0-8652-eeaa-8864065fcf8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.818183] env[62503]: DEBUG nova.network.neutron [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updating instance_info_cache with network_info: [{"id": "b0401560-7408-4d52-a32c-906d5934c94e", "address": "fa:16:3e:e5:bf:4d", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0401560-74", "ovs_interfaceid": "b0401560-7408-4d52-a32c-906d5934c94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.052191] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143982} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.052488] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.052681] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.052857] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.184382] env[62503]: DEBUG oslo_vmware.api [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Task: {'id': task-1388187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135203} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.185102] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.185382] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.185591] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.185796] env[62503]: INFO nova.compute.manager [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1041.186126] env[62503]: DEBUG oslo.service.loopingcall [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.189387] env[62503]: DEBUG nova.compute.manager [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1041.189509] env[62503]: DEBUG nova.network.neutron [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.191165] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b9f089-60b0-8652-eeaa-8864065fcf8b, 'name': SearchDatastore_Task, 'duration_secs': 0.016481} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.191409] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.191649] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/987b6101-565e-4eb2-b8af-f9afd5be38ce.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.192178] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10f77ebc-9a07-4467-bdff-f77a9c862937 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.198030] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1041.198030] env[62503]: value = "task-1388188" [ 1041.198030] env[62503]: _type = "Task" [ 1041.198030] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.205420] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388188, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.321230] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.321655] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Instance network_info: |[{"id": "b0401560-7408-4d52-a32c-906d5934c94e", "address": "fa:16:3e:e5:bf:4d", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0401560-74", "ovs_interfaceid": "b0401560-7408-4d52-a32c-906d5934c94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1041.322192] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:bf:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0401560-7408-4d52-a32c-906d5934c94e', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.332023] env[62503]: DEBUG oslo.service.loopingcall [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.332023] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.332023] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-677a2644-b344-4f77-9460-e0adcda8bd96 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.351562] env[62503]: DEBUG nova.compute.manager [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Received event network-vif-plugged-b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1041.351562] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Acquiring lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.351562] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.351562] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.351825] env[62503]: DEBUG nova.compute.manager [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] No waiting events found dispatching network-vif-plugged-b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1041.351970] env[62503]: WARNING nova.compute.manager [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Received unexpected event network-vif-plugged-b0401560-7408-4d52-a32c-906d5934c94e for instance with vm_state building and task_state spawning. [ 1041.352233] env[62503]: DEBUG nova.compute.manager [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Received event network-changed-b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1041.352443] env[62503]: DEBUG nova.compute.manager [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Refreshing instance network info cache due to event network-changed-b0401560-7408-4d52-a32c-906d5934c94e. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1041.352658] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Acquiring lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.352831] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Acquired lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.353028] env[62503]: DEBUG nova.network.neutron [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Refreshing network info cache for port b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.360935] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.360935] env[62503]: value = "task-1388189" [ 1041.360935] env[62503]: _type = "Task" [ 1041.360935] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.371285] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388189, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.558042] env[62503]: INFO nova.virt.block_device [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Booting with volume 2e377e60-04b4-4faa-8609-29e7538d02e0 at /dev/sdb [ 1041.596552] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc5277b4-ccdf-411c-a58f-59ebe50f27a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.607996] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445eb1da-e6d6-4fa3-b0f4-ad2237f39bda {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.636156] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94776afa-1e74-4342-acf7-7ac9f8e83a85 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.644363] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5374a88-2c74-46a2-9a34-8ab949a0760e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.668728] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34364dea-3c4f-4d86-921f-353c87b7255f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.675310] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854eb2ea-8457-48e6-bfbb-35814c62cd3c {{(pid=62503) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.688074] env[62503]: DEBUG nova.virt.block_device [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating existing volume attachment record: 7d433675-f5eb-45aa-b777-63c3717a2488 {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1041.707359] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388188, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439372} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.707679] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/987b6101-565e-4eb2-b8af-f9afd5be38ce.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1041.707935] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1041.708220] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2a7e629-36b1-409d-ad2c-6a4a268f4995 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.715014] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1041.715014] env[62503]: value = "task-1388190" [ 1041.715014] env[62503]: _type = "Task" [ 1041.715014] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.728513] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388190, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.871185] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388189, 'name': CreateVM_Task, 'duration_secs': 0.395257} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.871379] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.872040] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.872291] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.872620] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1041.872873] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdb9b33f-1130-4ed8-b106-52b525573829 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.877281] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1041.877281] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526fa7a2-3732-3c11-758a-bd11a8b59a5a" [ 1041.877281] env[62503]: _type = "Task" [ 1041.877281] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.885424] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526fa7a2-3732-3c11-758a-bd11a8b59a5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.917753] env[62503]: DEBUG nova.network.neutron [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.226468] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061388} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.226619] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.227328] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ecf724-544a-44b1-b526-cceb05ca3650 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.251349] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/987b6101-565e-4eb2-b8af-f9afd5be38ce.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.252290] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-218695fa-3a6b-436c-8a2c-ed64dfe76521 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.266381] env[62503]: DEBUG nova.network.neutron [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updated VIF entry in instance network info cache for port b0401560-7408-4d52-a32c-906d5934c94e. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.266731] env[62503]: DEBUG nova.network.neutron [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updating instance_info_cache with network_info: [{"id": "b0401560-7408-4d52-a32c-906d5934c94e", "address": "fa:16:3e:e5:bf:4d", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0401560-74", "ovs_interfaceid": "b0401560-7408-4d52-a32c-906d5934c94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.273674] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1042.273674] env[62503]: value = "task-1388191" [ 1042.273674] env[62503]: _type = "Task" [ 1042.273674] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.282390] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388191, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.387456] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526fa7a2-3732-3c11-758a-bd11a8b59a5a, 'name': SearchDatastore_Task, 'duration_secs': 0.008566} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.387762] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.388010] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.388251] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.388403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.388584] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.388832] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4da892a-1996-4d17-afb2-cd2876a53de8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.396682] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.396871] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.397522] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c88004-3c11-478a-816b-78439ba22f68 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.402487] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1042.402487] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52590805-1147-bf95-549b-f0a02e3fe45d" [ 1042.402487] env[62503]: _type = "Task" [ 1042.402487] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.409616] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52590805-1147-bf95-549b-f0a02e3fe45d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.420206] env[62503]: INFO nova.compute.manager [-] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Took 1.23 seconds to deallocate network for instance. [ 1042.769341] env[62503]: DEBUG oslo_concurrency.lockutils [req-e6c836ef-6bee-45b2-91b6-c05720272ebe req-365ce267-952e-4b64-96c9-b748727c0502 service nova] Releasing lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.784284] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388191, 'name': ReconfigVM_Task, 'duration_secs': 0.263426} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.784575] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/987b6101-565e-4eb2-b8af-f9afd5be38ce.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.785207] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee1d77c7-a3fc-4169-b6ac-0d6003576a88 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.791185] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1042.791185] env[62503]: value = "task-1388192" [ 1042.791185] env[62503]: _type = "Task" [ 1042.791185] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.800512] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388192, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.915041] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52590805-1147-bf95-549b-f0a02e3fe45d, 'name': SearchDatastore_Task, 'duration_secs': 0.00809} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.915970] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99fab770-d49d-429f-b38c-29fd10550cb2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.921951] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1042.921951] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269fea7-869b-bd19-18b2-a5d1072cc6ca" [ 1042.921951] env[62503]: _type = "Task" [ 1042.921951] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.926898] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.927172] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.927396] env[62503]: DEBUG nova.objects.instance [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lazy-loading 'resources' on Instance uuid 5fb35ae6-bfc6-4039-aa43-de8c550aacde {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.931217] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269fea7-869b-bd19-18b2-a5d1072cc6ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.301550] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388192, 'name': Rename_Task, 'duration_secs': 0.12953} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.302529] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1043.302879] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b8cce01-0450-4aea-bca0-5d7196757f33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.309567] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1043.309567] env[62503]: value = "task-1388193" [ 1043.309567] env[62503]: _type = "Task" [ 1043.309567] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.316847] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.370876] env[62503]: DEBUG nova.compute.manager [req-0ba00bf3-af62-410b-a312-aae0c4600231 req-30375045-28e3-409c-a3df-5bd4d4c33933 service nova] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Received event network-vif-deleted-1cdefdaf-8339-4d4f-a744-b28fbeb535db {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1043.431812] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5269fea7-869b-bd19-18b2-a5d1072cc6ca, 'name': SearchDatastore_Task, 'duration_secs': 0.008998} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.434467] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.434764] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/485d3aba-6c0d-46c7-860b-c0dbd9c16498.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.435255] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9caa1f9b-34ff-4f23-ba47-1246b2b05ac6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.442053] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1043.442053] env[62503]: value = "task-1388194" [ 1043.442053] env[62503]: _type = "Task" [ 1043.442053] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.450263] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388194, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.521845] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3ad43c-2b56-4dd5-9e5d-2d8f263a70f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.529345] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f77a400-9ddb-43c5-9614-96b76fb2af86 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.559558] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c3a73f-b6ac-4c4e-b72c-02de7decd426 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.567037] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5660d66-444c-4620-bd00-2941ce432275 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.580945] env[62503]: DEBUG nova.compute.provider_tree [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.800941] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.801433] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.801433] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.801613] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.801771] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.801921] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.802218] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.802402] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.802583] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.802751] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.802934] env[62503]: DEBUG nova.virt.hardware [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.803862] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3e6662-fec3-4364-a22b-8f9b2d48a367 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.815107] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8490bce-0653-42e1-b4a6-97d2bb716428 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.824017] env[62503]: DEBUG oslo_vmware.api [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388193, 'name': PowerOnVM_Task, 'duration_secs': 0.492245} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.831808] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.832068] env[62503]: INFO nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Took 6.86 seconds to spawn the instance on the hypervisor. [ 1043.832307] env[62503]: DEBUG nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1043.832857] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:71:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a06bfb1f-0b51-4150-8e23-cdfe68e9c27f', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.840432] env[62503]: DEBUG oslo.service.loopingcall [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1043.841229] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a26de80-de82-4c8d-bd51-f901fcbb736a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.843934] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.844185] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8639d3bf-6c2a-499e-a431-5a0a06c80755 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.868352] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.868352] env[62503]: value = "task-1388195" [ 1043.868352] env[62503]: _type = "Task" [ 1043.868352] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.876946] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388195, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.951873] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452989} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.952332] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/485d3aba-6c0d-46c7-860b-c0dbd9c16498.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.952464] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.952697] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dc22642-e8e3-4f23-9c03-fbf1d0b40d78 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.959139] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1043.959139] env[62503]: value = "task-1388196" [ 1043.959139] env[62503]: _type = "Task" [ 1043.959139] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.966643] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388196, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.084257] env[62503]: DEBUG nova.scheduler.client.report [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1044.377881] env[62503]: INFO nova.compute.manager [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Took 11.61 seconds to build instance. [ 1044.384281] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388195, 'name': CreateVM_Task, 'duration_secs': 0.341222} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.384461] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.385431] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.385600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.385910] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.386162] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9a02e8c-6f41-46e3-8ccc-77d2d651b692 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.390282] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1044.390282] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52098343-1392-7c7a-4eda-665b0eb8e180" [ 1044.390282] env[62503]: _type = "Task" [ 1044.390282] 
env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.398195] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52098343-1392-7c7a-4eda-665b0eb8e180, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.467452] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388196, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063931} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.467716] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.468437] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd8f062-1dd8-4b1f-b069-ec99a339b75b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.489175] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/485d3aba-6c0d-46c7-860b-c0dbd9c16498.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.489393] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94efe2c0-2370-444d-afcf-7523fb014139 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.506981] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1044.506981] env[62503]: value = "task-1388197" [ 1044.506981] env[62503]: _type = "Task" [ 1044.506981] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.517052] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388197, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.589512] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.614230] env[62503]: INFO nova.scheduler.client.report [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Deleted allocations for instance 5fb35ae6-bfc6-4039-aa43-de8c550aacde [ 1044.880450] env[62503]: DEBUG oslo_concurrency.lockutils [None req-971d784d-b28d-4337-bc9f-8b2ee172d1d1 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.124s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.900348] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52098343-1392-7c7a-4eda-665b0eb8e180, 'name': SearchDatastore_Task, 'duration_secs': 0.008365} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.900639] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.900877] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.901132] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.901287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.901467] env[62503]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.901722] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a58c640-1ee1-448a-b671-41a68b9a3b22 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.910386] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.910561] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.911245] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aab8f6c9-fb69-4474-9c4f-4a62dd07fc7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.916019] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1044.916019] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282e67a-5639-ebca-5e9e-cf383ad8ccd2" [ 1044.916019] env[62503]: _type = "Task" [ 1044.916019] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.922975] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282e67a-5639-ebca-5e9e-cf383ad8ccd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.017994] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388197, 'name': ReconfigVM_Task, 'duration_secs': 0.276323} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.018388] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/485d3aba-6c0d-46c7-860b-c0dbd9c16498.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.019113] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-208e6839-a7b3-416b-ad62-5a6401cd1eb7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.025761] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1045.025761] env[62503]: value = "task-1388198" [ 1045.025761] env[62503]: _type = "Task" [ 1045.025761] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.033254] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388198, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.123623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-3e841d85-43b9-4251-ae65-78e8d6b533ac tempest-ServerAddressesNegativeTestJSON-300837030 tempest-ServerAddressesNegativeTestJSON-300837030-project-member] Lock "5fb35ae6-bfc6-4039-aa43-de8c550aacde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.052s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.426422] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5282e67a-5639-ebca-5e9e-cf383ad8ccd2, 'name': SearchDatastore_Task, 'duration_secs': 0.007657} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.427499] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d109c3d-bdab-40c7-b625-abd44f3641d9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.432784] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1045.432784] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e69c5-1d04-75b8-c702-e90e6aea49ab" [ 1045.432784] env[62503]: _type = "Task" [ 1045.432784] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.439934] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e69c5-1d04-75b8-c702-e90e6aea49ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.536508] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388198, 'name': Rename_Task, 'duration_secs': 0.133065} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.536773] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.537017] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cc0de71-ba56-4454-a673-60455cd1f8e2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.543868] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1045.543868] env[62503]: value = "task-1388199" [ 1045.543868] env[62503]: _type = "Task" [ 1045.543868] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.552211] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388199, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.945036] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527e69c5-1d04-75b8-c702-e90e6aea49ab, 'name': SearchDatastore_Task, 'duration_secs': 0.008095} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.945526] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.945882] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.946283] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3110e190-2cce-4c12-a9f3-54a1b979ebdf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.952937] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1045.952937] env[62503]: value = "task-1388200" [ 1045.952937] env[62503]: _type = "Task" [ 1045.952937] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.961054] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.053567] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388199, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.462963] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453225} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.463201] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.463417] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.463672] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1a7ccad-7ded-467a-a295-116cc2eaeea5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.469503] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1046.469503] env[62503]: value = "task-1388201" [ 1046.469503] env[62503]: _type = "Task" [ 1046.469503] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.476930] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.554721] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388199, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.980443] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089477} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.980847] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.981496] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59464fe7-9aab-4407-ba43-24a84e784b11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.003432] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.003812] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b18a874-1e4c-47b9-91f2-6e3f93bdcfd0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.022430] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1047.022430] env[62503]: value = "task-1388202" [ 1047.022430] env[62503]: _type = "Task" [ 1047.022430] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.029737] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.053990] env[62503]: DEBUG oslo_vmware.api [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388199, 'name': PowerOnVM_Task, 'duration_secs': 1.108278} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.054368] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.054633] env[62503]: INFO nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Took 7.89 seconds to spawn the instance on the hypervisor. 
[ 1047.054824] env[62503]: DEBUG nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1047.055595] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9596b21c-547b-4183-9433-e7cf7e612fb0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.532973] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388202, 'name': ReconfigVM_Task, 'duration_secs': 0.266631} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.533373] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to attach disk [datastore2] ccc542a3-ff01-42ca-965e-706bed4c6e07/ccc542a3-ff01-42ca-965e-706bed4c6e07.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.534672] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'guest_format': None, 'encryption_format': None, 'size': 0, 'encryption_secret_uuid': None, 'encryption_options': None, 'encrypted': False, 'disk_bus': None, 'image_id': '8150ca02-f879-471d-8913-459408f127a1'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'delete_on_termination': False, 'device_type': None, 'guest_format': None, 'attachment_id': '7d433675-f5eb-45aa-b777-63c3717a2488', 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1047.534885] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Volume attach. 
Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1047.535087] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1047.535851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a590effd-b650-4d06-83a9-2e52985fe897 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.552059] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0867f8e-23b1-4f25-9db2-4c5f56b2bb87 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.579773] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.582052] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-193b73e8-e94c-48bd-999a-70f1f294ac04 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.595909] env[62503]: INFO nova.compute.manager [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Took 14.46 seconds to build instance. [ 1047.602368] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1047.602368] env[62503]: value = "task-1388203" [ 1047.602368] env[62503]: _type = "Task" [ 1047.602368] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.610129] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388203, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.098618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8f8175a8-01a8-4bc7-8603-56c0993b0c7d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.973s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.111803] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388203, 'name': ReconfigVM_Task, 'duration_secs': 0.394926} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.112095] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.116771] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d2154c1-fcda-4724-89a0-a745582fa1ec {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.131314] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1048.131314] env[62503]: value = "task-1388204" [ 1048.131314] env[62503]: _type = "Task" [ 1048.131314] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.138737] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388204, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.641752] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388204, 'name': ReconfigVM_Task, 'duration_secs': 0.141357} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.642021] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1048.642660] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c171df1-f085-4698-af74-a26ee0972a48 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.649839] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1048.649839] env[62503]: value = "task-1388205" [ 1048.649839] env[62503]: _type = "Task" [ 1048.649839] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.658300] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388205, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.667841] env[62503]: INFO nova.compute.manager [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Rescuing [ 1048.668094] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.668358] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.668404] env[62503]: DEBUG nova.network.neutron [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.161332] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388205, 'name': Rename_Task, 'duration_secs': 0.144193} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.161726] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.161847] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83a0cd31-4299-4594-88e6-685fd9e0e75b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.169225] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1049.169225] env[62503]: value = "task-1388206" [ 1049.169225] env[62503]: _type = "Task" [ 1049.169225] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.178771] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388206, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.354935] env[62503]: DEBUG nova.network.neutron [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updating instance_info_cache with network_info: [{"id": "b0401560-7408-4d52-a32c-906d5934c94e", "address": "fa:16:3e:e5:bf:4d", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0401560-74", "ovs_interfaceid": "b0401560-7408-4d52-a32c-906d5934c94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.679542] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388206, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.858019] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-485d3aba-6c0d-46c7-860b-c0dbd9c16498" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.181281] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388206, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.680672] env[62503]: DEBUG oslo_vmware.api [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388206, 'name': PowerOnVM_Task, 'duration_secs': 1.039101} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.680929] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.681152] env[62503]: DEBUG nova.compute.manager [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1050.681889] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1418d-f296-4623-be23-c897728ddedb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.891764] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.891764] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6efad70-a155-45ba-9abd-74d7729231a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.899437] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1050.899437] env[62503]: value = "task-1388207" [ 1050.899437] env[62503]: _type = "Task" [ 1050.899437] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.908769] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388207, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.198684] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.198970] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.199178] env[62503]: DEBUG nova.objects.instance [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1051.410233] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388207, 'name': PowerOffVM_Task, 'duration_secs': 0.15532} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.410630] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.411446] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0463bd41-9d0d-47eb-aff7-a64936df50ba {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.431553] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70275c0-9899-462e-99d3-67ddacdb75d8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.462863] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.463420] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60aa2551-03ee-4d72-b823-f6e1cbe709e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.469413] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting 
for the task: (returnval){ [ 1051.469413] env[62503]: value = "task-1388208" [ 1051.469413] env[62503]: _type = "Task" [ 1051.469413] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.476986] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.938287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.938537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.979431] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1051.979664] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1051.979907] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.980085] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.980302] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.980560] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54303acb-32c1-4010-9802-53d47c1c27ed {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.989623] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.989857] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1051.990608] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-013e7cf5-70d0-4194-8ee7-bd4a4cec4e3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.995309] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1051.995309] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525dc9fc-8217-eea3-953d-e33b6106f8b4" [ 1051.995309] env[62503]: _type = "Task" [ 1051.995309] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.002439] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525dc9fc-8217-eea3-953d-e33b6106f8b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.207893] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ebf43edd-91f3-4c77-9aa1-a539c24bc88f tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.441692] env[62503]: INFO nova.compute.manager [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Detaching volume 2e377e60-04b4-4faa-8609-29e7538d02e0 [ 1052.472085] env[62503]: INFO nova.virt.block_device [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Attempting to driver detach volume 2e377e60-04b4-4faa-8609-29e7538d02e0 from mountpoint /dev/sdb [ 1052.472370] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Volume detach. Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1052.472566] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1052.473443] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3c2cd0-817e-449e-be60-d26cd292bfe4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.494043] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c705837f-b53c-4b66-b4de-38bc0b54d04f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.504299] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]525dc9fc-8217-eea3-953d-e33b6106f8b4, 'name': SearchDatastore_Task, 'duration_secs': 0.046734} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.506349] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e0fe5a-8934-4f9a-b0a8-cd2c5819482d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.508651] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaf0471-7b5c-4b4f-92a9-976479338cbb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.513618] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1052.513618] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520b0d32-a8f3-af4a-ddaf-92d0a49739a4" [ 1052.513618] env[62503]: _type = "Task" [ 1052.513618] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.189991] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33db36a1-7edd-4b45-b5ca-823c1ebb1df7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.199015] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520b0d32-a8f3-af4a-ddaf-92d0a49739a4, 'name': SearchDatastore_Task, 'duration_secs': 0.023972} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.209186] env[62503]: DEBUG oslo_concurrency.lockutils [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.209558] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. {{(pid=62503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1053.209770] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] The volume has not been displaced from its original location: [datastore2] volume-2e377e60-04b4-4faa-8609-29e7538d02e0/volume-2e377e60-04b4-4faa-8609-29e7538d02e0.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1053.214870] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.215159] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c8d0a64-456a-45e0-8b24-ec5d90625a67 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.217130] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f085b50e-6574-440f-8c43-89f0794b393c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.235677] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1053.235677] env[62503]: value = "task-1388210" [ 1053.235677] env[62503]: _type = "Task" [ 1053.235677] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.236845] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1053.236845] env[62503]: value = "task-1388209" [ 1053.236845] env[62503]: _type = "Task" [ 1053.236845] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.247682] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388210, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.250348] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.748995] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388210, 'name': ReconfigVM_Task, 'duration_secs': 0.232828} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.751892] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1053.756395] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388209, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439676} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.756598] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5cb484c-01be-4088-b7ec-3ab5ca7fd163 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.765782] env[62503]: INFO nova.virt.vmwareapi.ds_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. [ 1053.766499] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625e97eb-4992-47b3-85cb-15cd240833de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.790665] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.791981] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cba49ac1-cbea-4272-beed-4d6da9c88098 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.806429] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1053.806429] env[62503]: value = "task-1388211" [ 1053.806429] env[62503]: _type = "Task" [ 1053.806429] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.813027] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1053.813027] env[62503]: value = "task-1388212" [ 1053.813027] env[62503]: _type = "Task" [ 1053.813027] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.818999] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.823693] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388212, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.316402] env[62503]: DEBUG oslo_vmware.api [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388211, 'name': ReconfigVM_Task, 'duration_secs': 0.138366} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.319330] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294642', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'name': 'volume-2e377e60-04b4-4faa-8609-29e7538d02e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ccc542a3-ff01-42ca-965e-706bed4c6e07', 'attached_at': '', 'detached_at': '', 'volume_id': '2e377e60-04b4-4faa-8609-29e7538d02e0', 'serial': '2e377e60-04b4-4faa-8609-29e7538d02e0'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1054.325926] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388212, 'name': ReconfigVM_Task, 'duration_secs': 0.252997} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.326216] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.327011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dac705-0b5d-4be2-9a86-d888fb71ec0b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.351734] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-787c75d1-81ef-4844-a2e9-ef51ce6f23a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.366853] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1054.366853] env[62503]: value = "task-1388213" [ 1054.366853] env[62503]: _type = "Task" [ 1054.366853] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.376063] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388213, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.860280] env[62503]: DEBUG nova.objects.instance [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'flavor' on Instance uuid ccc542a3-ff01-42ca-965e-706bed4c6e07 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.875778] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388213, 'name': ReconfigVM_Task, 'duration_secs': 0.136611} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.876750] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1054.877028] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8b126f3-116d-416f-a6a7-3cce1bb9eaf8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.883913] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1054.883913] env[62503]: value = "task-1388214" [ 1054.883913] env[62503]: _type = "Task" [ 1054.883913] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.892395] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.394079] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388214, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.866928] env[62503]: DEBUG oslo_concurrency.lockutils [None req-c6c62791-322b-4c1e-9e79-d30b48a46e01 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.928s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.895342] env[62503]: DEBUG oslo_vmware.api [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388214, 'name': PowerOnVM_Task, 'duration_secs': 0.957277} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.895631] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.898251] env[62503]: DEBUG nova.compute.manager [None req-94ca5c44-b57b-4538-a270-8dbf9cf90708 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1055.898992] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e92427f-21fa-4375-b11c-2c20e29ac017 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.179620] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.179908] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.180155] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.180399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.180587] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.182809] env[62503]: INFO nova.compute.manager [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Terminating instance [ 1056.184518] env[62503]: DEBUG nova.compute.manager [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1056.184734] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.185544] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844cf54e-acb7-45c7-816b-c0edcb35a4ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.193141] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.193614] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9b94f01-933d-4dfe-b3f5-684d9388d763 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.199590] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1056.199590] env[62503]: value = "task-1388215" [ 1056.199590] env[62503]: _type = "Task" [ 1056.199590] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.206651] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388215, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.514145] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1056.514503] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1056.514503] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 1056.710377] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388215, 'name': PowerOffVM_Task, 'duration_secs': 0.157341} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.710615] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.710763] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.711011] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4746fd77-275d-4214-ba73-b6aa1c897e71 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.773983] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.774254] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.774385] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore2] ccc542a3-ff01-42ca-965e-706bed4c6e07 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.774626] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-d5711614-8ff7-44c4-b26d-bf98ba7f71ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.780293] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1056.780293] env[62503]: value = "task-1388217" [ 1056.780293] env[62503]: _type = "Task" [ 1056.780293] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.788546] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.289620] env[62503]: DEBUG oslo_vmware.api [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128014} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.289966] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.290173] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.290380] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.290561] env[62503]: INFO nova.compute.manager [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1057.290793] env[62503]: DEBUG oslo.service.loopingcall [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.290977] env[62503]: DEBUG nova.compute.manager [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1057.291081] env[62503]: DEBUG nova.network.neutron [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.736664] env[62503]: DEBUG nova.compute.manager [req-e1236743-ac14-4430-83c5-ccdde42584b8 req-b9782958-3def-4e4d-9e51-e85748a98b61 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Received event network-vif-deleted-a06bfb1f-0b51-4150-8e23-cdfe68e9c27f {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1057.736929] env[62503]: INFO nova.compute.manager [req-e1236743-ac14-4430-83c5-ccdde42584b8 req-b9782958-3def-4e4d-9e51-e85748a98b61 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Neutron deleted interface a06bfb1f-0b51-4150-8e23-cdfe68e9c27f; detaching it from the instance and deleting it from the info cache [ 1057.737134] env[62503]: DEBUG nova.network.neutron [req-e1236743-ac14-4430-83c5-ccdde42584b8 req-b9782958-3def-4e4d-9e51-e85748a98b61 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.907295] env[62503]: INFO nova.compute.manager [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Rescuing [ 1057.907647] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.907846] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.908072] env[62503]: DEBUG nova.network.neutron [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.216293] env[62503]: DEBUG nova.network.neutron [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.239917] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d88a2dfd-206f-4066-ada7-0984427f0d1f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.249789] 
env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accfe64e-7e01-4ffd-bbee-7d031e57d0cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.275726] env[62503]: DEBUG nova.compute.manager [req-e1236743-ac14-4430-83c5-ccdde42584b8 req-b9782958-3def-4e4d-9e51-e85748a98b61 service nova] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Detach interface failed, port_id=a06bfb1f-0b51-4150-8e23-cdfe68e9c27f, reason: Instance ccc542a3-ff01-42ca-965e-706bed4c6e07 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1058.524755] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Didn't find any instances for network info cache update. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10310}} [ 1058.524755] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.524857] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.524983] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.525152] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.525290] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.525435] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.525566] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 1058.525710] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.609638] env[62503]: DEBUG nova.network.neutron [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.719281] env[62503]: INFO nova.compute.manager [-] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Took 1.43 seconds to deallocate network for instance. 
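The lock traces above ("Acquiring lock ... by ...", "acquired ... :: waited 0.001s", "released ... :: held 3.928s") are emitted by oslo.concurrency's lockutils: compute-manager operations such as detach_volume and terminate_instance wrap their work in a nested helper synchronized on the instance UUID, which is why the terminate of ccc542a3-ff01-42ca-965e-706bed4c6e07 queues behind the volume detach that still holds the lock. Below is a minimal sketch of that pattern, not Nova's actual implementation: the UUID is taken from the entries above, the helper body is a placeholder, and an in-process (non-external) lock is assumed.

from oslo_concurrency import lockutils

# UUID from the log entries above; Nova keys the lock on the real
# instance.uuid so concurrent operations on the same instance serialize.
INSTANCE_UUID = 'ccc542a3-ff01-42ca-965e-706bed4c6e07'

def terminate_instance(instance_uuid):
    # Nested helper guarded by a per-instance lock, mirroring the
    # "terminate_instance..do_terminate_instance" lock holder in the log.
    @lockutils.synchronized(instance_uuid, external=False)
    def do_terminate_instance():
        # placeholder body: power off, unregister the VM and delete its
        # datastore files would happen here
        pass

    do_terminate_instance()

terminate_instance(INSTANCE_UUID)

The "waited N.NNNs" / "held N.NNNs" figures in the entries above are logged by lockutils' own wrapper (lockutils.py:402/407/421) each time such a decorated helper acquires and releases its semaphore.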
[ 1059.030623] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.030985] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.030985] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.031146] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1059.032047] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249030ab-4e4c-45cd-a9aa-2b6f2a7f244e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.040136] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c34b315-168e-4c67-b356-ed74d700306d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.053415] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cd77b8-6f22-4881-968a-c206d30238e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.059123] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce65f7e2-4879-4953-943b-914c70f917a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.086210] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180789MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1059.086372] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.086546] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.111945] env[62503]: DEBUG 
oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.225632] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.115015] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.115316] env[62503]: WARNING nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance ccc542a3-ff01-42ca-965e-706bed4c6e07 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1060.115404] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance dd6341e2-cd68-4d12-80e7-51184d448764 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.115498] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 987b6101-565e-4eb2-b8af-f9afd5be38ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.115615] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 485d3aba-6c0d-46c7-860b-c0dbd9c16498 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.115796] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1060.115932] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1060.143599] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.144088] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92bda1a8-10bc-4948-b2a7-a48cb3aceb53 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.150931] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1060.150931] env[62503]: value = "task-1388218" [ 1060.150931] env[62503]: _type = "Task" [ 1060.150931] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.161972] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.189272] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2e1c9f-1b80-4481-b366-08dafe2f3e62 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.196358] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ae8e6b-9b85-484a-ae4c-1c4a96bec2dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.225036] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29116036-7829-4ed0-9457-ccd4ce1172fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.231948] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ce7fcb-a7df-465f-b2bf-fb3aa9f9fef0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.244881] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.660585] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388218, 'name': PowerOffVM_Task, 'duration_secs': 0.157533} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.660837] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.661601] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18c5196-0e86-49a8-a4b6-77abd8c5b4e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.679591] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ec3d6f-6933-4f04-b42e-8b254d3e0a3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.703963] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.704208] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4948a203-0feb-40fe-8ec4-50381fcb3892 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.709858] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1060.709858] env[62503]: value = "task-1388219" [ 1060.709858] env[62503]: _type = "Task" [ 1060.709858] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.717036] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.747947] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1061.220653] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1061.220961] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.221139] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.221294] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.221478] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.221730] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6932c60-c5e7-4375-8774-d76a2c3271ce {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.230152] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.230387] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1061.231097] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8747202-9add-40c9-b68b-d4f7244db702 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.235971] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1061.235971] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521e1c5a-28f4-e641-7ef9-85830956577d" [ 1061.235971] env[62503]: _type = "Task" [ 1061.235971] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.243391] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521e1c5a-28f4-e641-7ef9-85830956577d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.252093] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1061.252309] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.166s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.252543] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.027s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.252739] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.270931] env[62503]: INFO nova.scheduler.client.report [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocations for instance ccc542a3-ff01-42ca-965e-706bed4c6e07 [ 1061.746485] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 
tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]521e1c5a-28f4-e641-7ef9-85830956577d, 'name': SearchDatastore_Task, 'duration_secs': 0.0084} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.747270] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2e5de29-7313-440e-8889-cfbcd42dc5dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.751936] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1061.751936] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527348e1-9456-3d1f-a03c-12cccf77e986" [ 1061.751936] env[62503]: _type = "Task" [ 1061.751936] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.758687] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527348e1-9456-3d1f-a03c-12cccf77e986, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.777403] env[62503]: DEBUG oslo_concurrency.lockutils [None req-23be28ec-1e0f-4ef5-951a-9943e0f3d634 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "ccc542a3-ff01-42ca-965e-706bed4c6e07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.597s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.035272] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.035526] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.261960] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]527348e1-9456-3d1f-a03c-12cccf77e986, 'name': SearchDatastore_Task, 'duration_secs': 0.008112} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.262373] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.262659] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. {{(pid=62503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1062.262930] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d14a145-c25a-4658-8b7b-45789916de54 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.269493] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1062.269493] env[62503]: value = "task-1388220" [ 1062.269493] env[62503]: _type = "Task" [ 1062.269493] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.277226] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.539133] env[62503]: DEBUG nova.compute.utils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1062.782652] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476852} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.783042] env[62503]: INFO nova.virt.vmwareapi.ds_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. 
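The recurring "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete", "progress is N%" and "completed successfully" entries are oslo.vmware's task polling: the driver invokes an asynchronous vCenter *_Task method through the API session, then blocks in wait_for_task, which re-reads the task state at task_poll_interval until vCenter reports success or error. The following is a minimal sketch of that pattern using the public oslo.vmware session API; the endpoint, credentials and managed-object reference are placeholders, and Nova drives this through its vmwareapi driver rather than directly like this.

from oslo_vmware import api, vim_util

# Placeholder vCenter endpoint and credentials (Nova reads the real values
# from the [vmware] section of nova.conf).
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10,      # how many times transient API faults are retried
    task_poll_interval=0.5)  # seconds between the "progress is N%" polls

# Hypothetical VM managed-object reference; Nova resolves the real one via
# PropertyCollector queries (the RetrievePropertiesEx calls in the log).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start an asynchronous vCenter operation and block until it finishes;
# wait_for_task raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task has completed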
[ 1062.784187] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8311332-e933-4f4f-bbbe-b437733f3a7d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.816127] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.816845] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70bf25d3-7569-431f-acb9-9059b2be2883 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.836171] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1062.836171] env[62503]: value = "task-1388221" [ 1062.836171] env[62503]: _type = "Task" [ 1062.836171] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.843849] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388221, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.042081] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.345997] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388221, 'name': ReconfigVM_Task, 'duration_secs': 0.30813} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.346306] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.347132] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855667a0-717b-40c6-a607-b1c213adbbc5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.370622] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfadf7ad-f7f7-4a27-9851-125b84591aab {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.386059] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1063.386059] env[62503]: value = "task-1388222" [ 1063.386059] env[62503]: _type = "Task" [ 1063.386059] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.394410] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.895261] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388222, 'name': ReconfigVM_Task, 'duration_secs': 0.143846} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.895546] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.895797] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f62d43be-305e-469d-8694-c0e399477a34 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.901911] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1063.901911] env[62503]: value = "task-1388223" [ 1063.901911] env[62503]: _type = "Task" [ 1063.901911] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.908944] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388223, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.912603] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "0ae63db4-6856-46d8-afa9-876b17152859" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.912822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.102863] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.103306] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.103657] env[62503]: INFO nova.compute.manager [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Attaching volume a6b7fd29-ef47-45ee-986f-8544f580f012 to /dev/sdb [ 1064.139355] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274b3f77-3471-42c0-82b6-a56b4219e0a9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.146571] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81135fb5-ff49-4609-9ad3-ee3a924a7d7c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.159517] env[62503]: DEBUG nova.virt.block_device [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating existing volume attachment record: f569c58e-5270-474b-91ac-828accd061bd {{(pid=62503) 
_volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1064.412050] env[62503]: DEBUG oslo_vmware.api [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388223, 'name': PowerOnVM_Task, 'duration_secs': 0.383692} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.412422] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.415270] env[62503]: DEBUG nova.compute.manager [None req-ee37334d-603b-44dd-9c33-ac81cc5312de tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1064.415654] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1064.418545] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e08ba37-c75b-4aa0-bbfb-3bea17a7bc5d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.939470] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.939752] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.941180] env[62503]: INFO nova.compute.claims [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1065.869260] env[62503]: INFO nova.compute.manager [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Unrescuing [ 1065.869625] env[62503]: DEBUG oslo_concurrency.lockutils [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" 
{{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.869748] env[62503]: DEBUG oslo_concurrency.lockutils [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.869986] env[62503]: DEBUG nova.network.neutron [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.012183] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dfc4a0-e620-4282-9b4e-32489835b715 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.019675] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f2fd46-7205-4185-a036-62ceb483b5a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.048874] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd8ae16-7ae6-4943-91ef-3c0d0e8dfe88 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.055300] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd17c78-ce10-40ba-8082-d8786b12b2f9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.067505] env[62503]: DEBUG nova.compute.provider_tree [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.570745] env[62503]: DEBUG nova.scheduler.client.report [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1066.619407] env[62503]: DEBUG nova.network.neutron [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": 
"62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.076080] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.076643] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1067.121968] env[62503]: DEBUG oslo_concurrency.lockutils [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.122634] env[62503]: DEBUG nova.objects.instance [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'flavor' on Instance uuid 987b6101-565e-4eb2-b8af-f9afd5be38ce {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.581477] env[62503]: DEBUG nova.compute.utils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1067.582902] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1067.583082] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1067.621799] env[62503]: DEBUG nova.policy [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b076e358f78e4874876f90d96fd612e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e818e5ee9dc24efa96747c9558514a15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1067.627633] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfab2e0-609b-40c8-914a-3ab4f8c6aa2a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.649600] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.649925] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b474cc9d-19b9-44b1-8275-fccb04009fc5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.656342] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1067.656342] env[62503]: value = "task-1388226" [ 1067.656342] env[62503]: _type = "Task" [ 1067.656342] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.664227] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388226, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.919526] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Successfully created port: 95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1068.086789] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1068.166316] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388226, 'name': PowerOffVM_Task, 'duration_secs': 0.240544} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.166634] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1068.172268] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.172268] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-905740f3-7fbb-44e9-b887-955e47d9e772 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.191895] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1068.191895] env[62503]: value = "task-1388227" [ 1068.191895] env[62503]: _type = "Task" [ 1068.191895] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.199725] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388227, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.701538] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388227, 'name': ReconfigVM_Task, 'duration_secs': 0.208295} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.701871] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1068.702035] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.703047] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Volume attach. Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1068.703264] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294651', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'name': 'volume-a6b7fd29-ef47-45ee-986f-8544f580f012', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd6341e2-cd68-4d12-80e7-51184d448764', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'serial': 'a6b7fd29-ef47-45ee-986f-8544f580f012'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1068.703523] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ba27a88-4aca-4f68-9ee6-af63c811cbe0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.705543] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eefa8b1-841f-4adb-9754-0af18ba2e7a5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.721155] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5afb55-d83c-4228-a497-3cbf55414e73 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.723438] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1068.723438] env[62503]: value = "task-1388228" [ 1068.723438] env[62503]: _type = "Task" [ 1068.723438] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.745696] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-a6b7fd29-ef47-45ee-986f-8544f580f012/volume-a6b7fd29-ef47-45ee-986f-8544f580f012.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.746349] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f673f317-4c5b-4bde-b488-45e508e7514a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.762272] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388228, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.768649] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1068.768649] env[62503]: value = "task-1388229" [ 1068.768649] env[62503]: _type = "Task" [ 1068.768649] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.777265] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.096786] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1069.123444] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1069.123726] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1069.123888] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.124095] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1069.124251] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.124405] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1069.124622] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1069.124829] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1069.125022] env[62503]: DEBUG nova.virt.hardware [None 
req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1069.125198] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1069.125378] env[62503]: DEBUG nova.virt.hardware [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1069.126278] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447a2a20-b9e6-4316-ad84-2a2eedb92679 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.133909] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c27d7b-9aea-4afa-8499-3b7fa4634967 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.233198] env[62503]: DEBUG oslo_vmware.api [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388228, 'name': PowerOnVM_Task, 'duration_secs': 0.454405} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.233477] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.233716] env[62503]: DEBUG nova.compute.manager [None req-67db6d56-a3dd-4307-af3f-b6c423424c04 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1069.234502] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03cdd7f-42b3-4bfc-b7f4-2fe13018e182 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.280512] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388229, 'name': ReconfigVM_Task, 'duration_secs': 0.395383} completed successfully. 
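The nova.virt.hardware entries above trace CPU topology selection: flavor and image limits of 0:0:0 fall back to the 65536 maximums, and 1 vCPU leaves a single possible topology of 1 socket, 1 core, 1 thread. A simplified enumerator of valid (sockets, cores, threads) triples under such limits, given only as an illustration of the counting step (it is not Nova's implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    # list(possible_topologies(1)) -> [(1, 1, 1)], matching the single
    # topology reported for this 1-vCPU flavor in the log.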
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.281817] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-a6b7fd29-ef47-45ee-986f-8544f580f012/volume-a6b7fd29-ef47-45ee-986f-8544f580f012.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.287496] env[62503]: DEBUG nova.compute.manager [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Received event network-vif-plugged-95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1069.287708] env[62503]: DEBUG oslo_concurrency.lockutils [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] Acquiring lock "0ae63db4-6856-46d8-afa9-876b17152859-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.287932] env[62503]: DEBUG oslo_concurrency.lockutils [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] Lock "0ae63db4-6856-46d8-afa9-876b17152859-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.288128] env[62503]: DEBUG oslo_concurrency.lockutils [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] Lock "0ae63db4-6856-46d8-afa9-876b17152859-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.288293] env[62503]: DEBUG nova.compute.manager [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] No waiting events found dispatching network-vif-plugged-95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1069.288470] env[62503]: WARNING nova.compute.manager [req-c7a18751-3fa1-422d-8e59-8d11846f5285 req-4f873647-c8e9-4df4-bff7-fdd117a7187a service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Received unexpected event network-vif-plugged-95590754-6e6e-4929-87d5-f7ac675aaf17 for instance with vm_state building and task_state spawning. 
[ 1069.289206] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2443a0be-2bcc-4f0d-a0fd-e48a39f6a547 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.305251] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1069.305251] env[62503]: value = "task-1388230" [ 1069.305251] env[62503]: _type = "Task" [ 1069.305251] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.341069] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388230, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.815208] env[62503]: DEBUG oslo_vmware.api [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388230, 'name': ReconfigVM_Task, 'duration_secs': 0.140067} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.815530] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294651', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'name': 'volume-a6b7fd29-ef47-45ee-986f-8544f580f012', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd6341e2-cd68-4d12-80e7-51184d448764', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'serial': 'a6b7fd29-ef47-45ee-986f-8544f580f012'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1069.846058] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Successfully updated port: 95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1069.869815] env[62503]: DEBUG nova.compute.manager [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Received event network-changed-95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1069.870040] env[62503]: DEBUG nova.compute.manager [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Refreshing instance network info cache due to event network-changed-95590754-6e6e-4929-87d5-f7ac675aaf17. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1069.870271] env[62503]: DEBUG oslo_concurrency.lockutils [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] Acquiring lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.870487] env[62503]: DEBUG oslo_concurrency.lockutils [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] Acquired lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.870698] env[62503]: DEBUG nova.network.neutron [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Refreshing network info cache for port 95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.349924] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.404152] env[62503]: DEBUG nova.network.neutron [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1070.470887] env[62503]: DEBUG nova.network.neutron [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.852481] env[62503]: DEBUG nova.objects.instance [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid dd6341e2-cd68-4d12-80e7-51184d448764 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.973535] env[62503]: DEBUG oslo_concurrency.lockutils [req-3efcffc6-fda0-4a5a-8ff3-1dca3dedb159 req-8d440ee3-3f9d-452a-86f2-a0f941859e46 service nova] Releasing lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.973943] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.974120] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Building network info cache for instance {{(pid=62503) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.358032] env[62503]: DEBUG oslo_concurrency.lockutils [None req-5b011a35-ae72-489d-bbfa-898626185b0f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.255s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.511794] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1071.555713] env[62503]: DEBUG oslo_concurrency.lockutils [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.557451] env[62503]: DEBUG oslo_concurrency.lockutils [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.643597] env[62503]: DEBUG nova.network.neutron [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updating instance_info_cache with network_info: [{"id": "95590754-6e6e-4929-87d5-f7ac675aaf17", "address": "fa:16:3e:22:f9:d7", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95590754-6e", "ovs_interfaceid": "95590754-6e6e-4929-87d5-f7ac675aaf17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.060627] env[62503]: INFO nova.compute.manager [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 
tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Detaching volume a6b7fd29-ef47-45ee-986f-8544f580f012 [ 1072.101783] env[62503]: INFO nova.virt.block_device [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Attempting to driver detach volume a6b7fd29-ef47-45ee-986f-8544f580f012 from mountpoint /dev/sdb [ 1072.102070] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Volume detach. Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1072.102336] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294651', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'name': 'volume-a6b7fd29-ef47-45ee-986f-8544f580f012', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd6341e2-cd68-4d12-80e7-51184d448764', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'serial': 'a6b7fd29-ef47-45ee-986f-8544f580f012'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1072.103313] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105637fc-d288-4b0e-880a-083bbf062062 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.124854] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8ed142-2b8f-4e96-bf77-9cae4e2039ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.131616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe29542-8d21-4459-b9e1-869984ca04c8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.151031] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.151334] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Instance network_info: |[{"id": "95590754-6e6e-4929-87d5-f7ac675aaf17", "address": "fa:16:3e:22:f9:d7", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95590754-6e", "ovs_interfaceid": "95590754-6e6e-4929-87d5-f7ac675aaf17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1072.151876] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:f9:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95590754-6e6e-4929-87d5-f7ac675aaf17', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.159110] env[62503]: DEBUG oslo.service.loopingcall [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.159793] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963d1b5c-77e3-4254-ae1f-1ae18ccc6e7a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.162386] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1072.162808] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12e5aebe-bb49-4563-a9c5-e95dc7819425 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.188477] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] The volume has not been displaced from its original location: [datastore2] volume-a6b7fd29-ef47-45ee-986f-8544f580f012/volume-a6b7fd29-ef47-45ee-986f-8544f580f012.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1072.193588] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.194842] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e71bc69-0101-4c3e-ae76-fe70ebecca91 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.207007] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.207007] env[62503]: value = "task-1388231" [ 1072.207007] env[62503]: _type = "Task" [ 1072.207007] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.212742] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1072.212742] env[62503]: value = "task-1388232" [ 1072.212742] env[62503]: _type = "Task" [ 1072.212742] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.221384] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388231, 'name': CreateVM_Task} progress is 15%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.224277] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388232, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.720036] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388231, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.724778] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.725033] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.728932] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388232, 'name': ReconfigVM_Task, 'duration_secs': 0.234974} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.729366] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1072.734255] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5172f8a1-30de-48d3-b589-1c54d9905660 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.749394] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1072.749394] env[62503]: value = "task-1388233" [ 1072.749394] env[62503]: _type = "Task" [ 1072.749394] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.756952] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.217308] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388231, 'name': CreateVM_Task, 'duration_secs': 0.615982} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.217484] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.218177] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.218348] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.218659] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1073.218911] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a96fa1b-1c80-4f3e-94e7-a362a580cd10 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.223247] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1073.223247] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5228cd77-c92b-4090-75ef-771413bce74d" [ 1073.223247] env[62503]: _type = "Task" [ 1073.223247] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.230552] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1073.233102] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5228cd77-c92b-4090-75ef-771413bce74d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.257660] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388233, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.733723] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5228cd77-c92b-4090-75ef-771413bce74d, 'name': SearchDatastore_Task, 'duration_secs': 0.015641} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.736332] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.736584] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1073.736821] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.736970] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.737161] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.739447] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78b925ac-4e30-476f-9a08-ff48f96e7d46 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.748496] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.748691] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1073.749442] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79d0959f-433e-4026-87ce-031410257893 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.755891] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.756167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.757704] env[62503]: INFO nova.compute.claims [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.762180] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1073.762180] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ae5a67-6505-1193-6ab1-dac8af98c261" [ 1073.762180] env[62503]: _type = "Task" [ 1073.762180] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.768608] env[62503]: DEBUG oslo_vmware.api [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388233, 'name': ReconfigVM_Task, 'duration_secs': 0.759267} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.769291] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294651', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'name': 'volume-a6b7fd29-ef47-45ee-986f-8544f580f012', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dd6341e2-cd68-4d12-80e7-51184d448764', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b7fd29-ef47-45ee-986f-8544f580f012', 'serial': 'a6b7fd29-ef47-45ee-986f-8544f580f012'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1073.774651] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52ae5a67-6505-1193-6ab1-dac8af98c261, 'name': SearchDatastore_Task, 'duration_secs': 0.008332} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.775433] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7df9aa58-f68f-451d-8fee-b1f62679569d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.780835] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1073.780835] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e3e802-51b2-1025-b142-23e6f926b975" [ 1073.780835] env[62503]: _type = "Task" [ 1073.780835] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.790053] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e3e802-51b2-1025-b142-23e6f926b975, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.290986] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52e3e802-51b2-1025-b142-23e6f926b975, 'name': SearchDatastore_Task, 'duration_secs': 0.015038} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.291269] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.291574] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 0ae63db4-6856-46d8-afa9-876b17152859/0ae63db4-6856-46d8-afa9-876b17152859.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1074.291817] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e141e44-0f82-4fda-94a0-7e90f712242d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.298853] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1074.298853] env[62503]: value = "task-1388234" [ 1074.298853] env[62503]: _type = "Task" [ 1074.298853] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.306432] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.314301] env[62503]: DEBUG nova.objects.instance [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid dd6341e2-cd68-4d12-80e7-51184d448764 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.816096] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388234, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.884929] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d65ed4-9ea5-4321-b7d8-f5da257e9546 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.892660] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0cb196-cc52-469e-b2eb-e9b28a943430 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.924538] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a971f0d-943e-4beb-82e3-3893a162d282 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.931985] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed62f54-a302-4408-94f2-ff99b22f705e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.947688] env[62503]: DEBUG nova.compute.provider_tree [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.309828] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604646} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.310132] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 0ae63db4-6856-46d8-afa9-876b17152859/0ae63db4-6856-46d8-afa9-876b17152859.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1075.310360] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1075.310634] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4029874-2079-4895-b87f-f40812b27452 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.318158] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1075.318158] env[62503]: value = "task-1388235" [ 1075.318158] env[62503]: _type = "Task" [ 1075.318158] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.322828] env[62503]: DEBUG oslo_concurrency.lockutils [None req-473a95d9-18ce-45ff-94a8-19836041b0ec tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.767s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.328470] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388235, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.451345] env[62503]: DEBUG nova.scheduler.client.report [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1075.791899] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.792233] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.792503] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.792710] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.792914] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.795314] env[62503]: INFO nova.compute.manager [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Terminating instance [ 1075.797156] env[62503]: DEBUG nova.compute.manager [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1075.797377] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.798270] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab60f3a-1f3c-44a2-a366-dc5b72b73dad {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.806560] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.806790] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ce79d61-efde-449c-b872-b8372844281d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.814089] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1075.814089] env[62503]: value = "task-1388236" [ 1075.814089] env[62503]: _type = "Task" [ 1075.814089] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.825039] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388236, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.829953] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388235, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073894} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.830281] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1075.831028] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5e90db-24c4-4fde-a91e-a7e4ce3f02f5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.852351] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 0ae63db4-6856-46d8-afa9-876b17152859/0ae63db4-6856-46d8-afa9-876b17152859.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.852646] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79e06525-024a-4e2e-8e3e-9011dca58f0d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.871735] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1075.871735] env[62503]: value = "task-1388237" [ 1075.871735] env[62503]: _type = "Task" [ 1075.871735] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.879447] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.955894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.956668] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1076.326694] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388236, 'name': PowerOffVM_Task, 'duration_secs': 0.167078} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.326952] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.327135] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.327388] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3c9808d-3c90-4f1c-acd2-d687142614da {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.380848] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388237, 'name': ReconfigVM_Task, 'duration_secs': 0.312714} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.381141] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 0ae63db4-6856-46d8-afa9-876b17152859/0ae63db4-6856-46d8-afa9-876b17152859.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.381758] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5177206-05ca-4b8a-9fc2-c67ad2e7d0ac {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.388122] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1076.388122] env[62503]: value = "task-1388239" [ 1076.388122] env[62503]: _type = "Task" [ 1076.388122] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.394542] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.394768] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.395127] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleting the datastore file [datastore2] dd6341e2-cd68-4d12-80e7-51184d448764 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.398117] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c89d56f4-9c07-476c-b6c2-3e748f1735fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.400059] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388239, 'name': Rename_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.404026] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1076.404026] env[62503]: value = "task-1388240" [ 1076.404026] env[62503]: _type = "Task" [ 1076.404026] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.411207] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.462727] env[62503]: DEBUG nova.compute.utils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1076.465261] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Allocating IP information in the background. 
{{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1076.465261] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1076.516941] env[62503]: DEBUG nova.policy [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef723806cc714bf7a98b659c4343a094', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b86eee9480274a9196fc8ccd920671f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1076.819969] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Successfully created port: 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.898492] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388239, 'name': Rename_Task, 'duration_secs': 0.147443} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.898949] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1076.899243] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ccb9be8-af51-4eba-b77b-374c24a1fd18 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.906679] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1076.906679] env[62503]: value = "task-1388241" [ 1076.906679] env[62503]: _type = "Task" [ 1076.906679] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.916165] env[62503]: DEBUG oslo_vmware.api [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.918958] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.919661] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.919661] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.919661] env[62503]: INFO nova.compute.manager [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1076.919997] env[62503]: DEBUG oslo.service.loopingcall [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.920249] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.920566] env[62503]: DEBUG nova.compute.manager [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1076.920704] env[62503]: DEBUG nova.network.neutron [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.967260] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1077.424856] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388241, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.472725] env[62503]: DEBUG nova.compute.manager [req-62e0e0e0-cd8a-40f4-9110-7b8290c24942 req-8ea7e3d9-21c5-43f5-9a99-23ea52a15a9e service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Received event network-vif-deleted-705c4161-6bc5-4a66-af1d-c76f64ef1a65 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1077.473535] env[62503]: INFO nova.compute.manager [req-62e0e0e0-cd8a-40f4-9110-7b8290c24942 req-8ea7e3d9-21c5-43f5-9a99-23ea52a15a9e service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Neutron deleted interface 705c4161-6bc5-4a66-af1d-c76f64ef1a65; detaching it from the instance and deleting it from the info cache [ 1077.473535] env[62503]: DEBUG nova.network.neutron [req-62e0e0e0-cd8a-40f4-9110-7b8290c24942 req-8ea7e3d9-21c5-43f5-9a99-23ea52a15a9e service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.920249] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388241, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.951030] env[62503]: DEBUG nova.network.neutron [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.982033] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1077.984648] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10f26a52-5cd7-4ba7-a77a-94ecacf8d33a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.994960] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6046405-c459-4eb2-a8a1-fcf87ea2340b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.013200] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1078.013457] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1078.013649] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.013845] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1078.013995] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.014163] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1078.014374] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 
tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1078.014539] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1078.014713] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1078.014878] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1078.015071] env[62503]: DEBUG nova.virt.hardware [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1078.015877] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ca35c3-870e-4083-8eda-1482f9d35f36 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.025249] env[62503]: DEBUG nova.compute.manager [req-62e0e0e0-cd8a-40f4-9110-7b8290c24942 req-8ea7e3d9-21c5-43f5-9a99-23ea52a15a9e service nova] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Detach interface failed, port_id=705c4161-6bc5-4a66-af1d-c76f64ef1a65, reason: Instance dd6341e2-cd68-4d12-80e7-51184d448764 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1078.030349] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a60f03-749e-43a1-8295-984ad2a41cee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.421724] env[62503]: DEBUG oslo_vmware.api [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388241, 'name': PowerOnVM_Task, 'duration_secs': 1.320072} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.422019] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.422249] env[62503]: INFO nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1078.422484] env[62503]: DEBUG nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1078.423271] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cce0c9a-83d7-4a37-93ba-5ff6c3e053e1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.453495] env[62503]: INFO nova.compute.manager [-] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Took 1.53 seconds to deallocate network for instance. [ 1078.523795] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Successfully updated port: 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.942556] env[62503]: INFO nova.compute.manager [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Took 14.02 seconds to build instance. 
[ 1078.963877] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.963877] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.963877] env[62503]: DEBUG nova.objects.instance [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'resources' on Instance uuid dd6341e2-cd68-4d12-80e7-51184d448764 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.011482] env[62503]: DEBUG nova.compute.manager [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Received event network-changed-95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1079.011727] env[62503]: DEBUG nova.compute.manager [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Refreshing instance network info cache due to event network-changed-95590754-6e6e-4929-87d5-f7ac675aaf17. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1079.011991] env[62503]: DEBUG oslo_concurrency.lockutils [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] Acquiring lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.012196] env[62503]: DEBUG oslo_concurrency.lockutils [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] Acquired lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.012404] env[62503]: DEBUG nova.network.neutron [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Refreshing network info cache for port 95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.027143] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.027326] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.027509] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.443848] env[62503]: DEBUG oslo_concurrency.lockutils [None req-624e6a9f-6cc4-4a93-bcc3-71b1c59844f4 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.531s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.499982] env[62503]: DEBUG nova.compute.manager [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-vif-plugged-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1079.499982] env[62503]: DEBUG oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.499982] env[62503]: DEBUG 
oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.500386] env[62503]: DEBUG oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.500386] env[62503]: DEBUG nova.compute.manager [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] No waiting events found dispatching network-vif-plugged-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1079.500507] env[62503]: WARNING nova.compute.manager [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received unexpected event network-vif-plugged-0a00591c-6583-407d-bf50-60a53719508b for instance with vm_state building and task_state spawning. [ 1079.500627] env[62503]: DEBUG nova.compute.manager [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-changed-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1079.500783] env[62503]: DEBUG nova.compute.manager [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing instance network info cache due to event network-changed-0a00591c-6583-407d-bf50-60a53719508b. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1079.500951] env[62503]: DEBUG oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.563746] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.634299] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2411f4-9635-4820-bc79-06cd2b6b2f23 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.642031] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9710c740-b68d-40f4-ad44-483039aef5cb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.684167] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e36d24f-7c80-4cc8-9d69-b68eb8b67a0e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.691814] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9395d18d-ab69-4b2f-9c6a-d3d1086d5b24 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.706109] env[62503]: DEBUG nova.compute.provider_tree [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.852829] env[62503]: DEBUG nova.network.neutron [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.904416] env[62503]: DEBUG nova.network.neutron [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updated VIF entry in instance network info cache for port 95590754-6e6e-4929-87d5-f7ac675aaf17. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.904973] env[62503]: DEBUG nova.network.neutron [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updating instance_info_cache with network_info: [{"id": "95590754-6e6e-4929-87d5-f7ac675aaf17", "address": "fa:16:3e:22:f9:d7", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95590754-6e", "ovs_interfaceid": "95590754-6e6e-4929-87d5-f7ac675aaf17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.209070] env[62503]: DEBUG nova.scheduler.client.report [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1080.355326] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.355658] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Instance network_info: |[{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1080.355994] env[62503]: DEBUG oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.356215] env[62503]: DEBUG nova.network.neutron [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing network info cache for port 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1080.357485] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:8a:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a00591c-6583-407d-bf50-60a53719508b', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.366060] env[62503]: DEBUG oslo.service.loopingcall [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.366976] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.367233] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e895e1b-b751-42ab-87fd-59b583232e26 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.387673] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.387673] env[62503]: value = "task-1388243" [ 1080.387673] env[62503]: _type = "Task" [ 1080.387673] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.394912] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388243, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.407936] env[62503]: DEBUG oslo_concurrency.lockutils [req-f2593dae-fee4-44c4-a146-2216601468ac req-c5804594-d465-4579-9389-a2a9555f0202 service nova] Releasing lock "refresh_cache-0ae63db4-6856-46d8-afa9-876b17152859" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.714432] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.751s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.737556] env[62503]: INFO nova.scheduler.client.report [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted allocations for instance dd6341e2-cd68-4d12-80e7-51184d448764 [ 1080.898819] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388243, 'name': CreateVM_Task, 'duration_secs': 0.291785} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.899128] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1080.899798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.900015] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.900399] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1080.900701] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8ee625e-264f-49ae-9ce3-60f3cb3d74df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.905494] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1080.905494] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b6595d-1362-f046-a715-f994909a6dfd" [ 1080.905494] env[62503]: _type = 
"Task" [ 1080.905494] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.913793] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b6595d-1362-f046-a715-f994909a6dfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.112875] env[62503]: DEBUG nova.network.neutron [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updated VIF entry in instance network info cache for port 0a00591c-6583-407d-bf50-60a53719508b. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.113250] env[62503]: DEBUG nova.network.neutron [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.244918] env[62503]: DEBUG oslo_concurrency.lockutils [None req-1195d5dc-4420-419b-9cca-cd7212a4217d tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "dd6341e2-cd68-4d12-80e7-51184d448764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.453s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.416783] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52b6595d-1362-f046-a715-f994909a6dfd, 'name': SearchDatastore_Task, 'duration_secs': 0.009308} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.417091] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.417334] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.417573] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.417725] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.417904] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.418180] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3787746d-8693-4b6d-ad0e-91470e778d62 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.427198] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.427407] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.428134] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa03c29b-ca56-45fb-8ddc-2e6607cd84fe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.433013] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1081.433013] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524562d7-56da-3b71-0070-1fc3f1fd5492" [ 1081.433013] env[62503]: _type = "Task" [ 1081.433013] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.440313] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524562d7-56da-3b71-0070-1fc3f1fd5492, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.616327] env[62503]: DEBUG oslo_concurrency.lockutils [req-dbae4196-e7b9-4404-b59e-57fd64252d78 req-f5b66a9c-9351-40d7-b90e-da553cf6920a service nova] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.944285] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524562d7-56da-3b71-0070-1fc3f1fd5492, 'name': SearchDatastore_Task, 'duration_secs': 0.008453} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.944738] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4e69146-98a7-493a-87ee-699ec842a1eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.949938] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1081.949938] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eefa9a-4d96-d03a-2895-12f62efb4fa8" [ 1081.949938] env[62503]: _type = "Task" [ 1081.949938] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.958669] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eefa9a-4d96-d03a-2895-12f62efb4fa8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.460168] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52eefa9a-4d96-d03a-2895-12f62efb4fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.008692} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.460499] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.460743] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/94e69c2d-bf7a-42a8-a063-62ad1bb7f927.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.461047] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f220681-fd2c-44f0-ae8e-864767ad4727 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.467183] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1082.467183] env[62503]: value = "task-1388244" [ 1082.467183] env[62503]: _type = "Task" [ 1082.467183] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.474928] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388244, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.753697] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.753952] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.979109] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388244, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.256539] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Starting instance... {{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1083.478503] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547589} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.478852] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/94e69c2d-bf7a-42a8-a063-62ad1bb7f927.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.478942] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.479238] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-debcaeed-b108-43cc-9e02-e2223621dcb6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.485981] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1083.485981] env[62503]: value = "task-1388246" [ 1083.485981] env[62503]: _type = "Task" [ 1083.485981] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.493798] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388246, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.778454] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.778750] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.780311] env[62503]: INFO nova.compute.claims [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.996545] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096249} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.996845] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.997616] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18869611-e083-4dbe-a351-c4479e115398 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.018730] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/94e69c2d-bf7a-42a8-a063-62ad1bb7f927.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.018931] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-503d805d-3a98-464d-94d3-5b2963df6160 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.037871] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1084.037871] env[62503]: value = "task-1388247" [ 1084.037871] env[62503]: _type = "Task" [ 1084.037871] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.044851] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388247, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.549323] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388247, 'name': ReconfigVM_Task, 'duration_secs': 0.286045} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.549755] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/94e69c2d-bf7a-42a8-a063-62ad1bb7f927.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.550663] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77c007e7-0df5-41d2-87e5-ce14f0df5c57 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.557890] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1084.557890] env[62503]: value = "task-1388248" [ 1084.557890] env[62503]: _type = "Task" [ 1084.557890] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.566232] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388248, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.870269] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63659242-296f-4249-a63e-9331e700eacf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.878008] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b26765-77ef-4187-94e6-19e0edc0b45d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.908849] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001b02c6-0178-475c-9fc6-39638df89301 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.916011] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f6b97b-4f76-42d3-8aae-25637983e3f4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.928997] env[62503]: DEBUG nova.compute.provider_tree [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.067201] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388248, 'name': Rename_Task, 'duration_secs': 0.135896} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.067532] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.067819] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17f07b9c-8471-4380-95ed-c802e955d439 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.073023] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1085.073023] env[62503]: value = "task-1388250" [ 1085.073023] env[62503]: _type = "Task" [ 1085.073023] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.080410] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388250, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.432830] env[62503]: DEBUG nova.scheduler.client.report [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1085.582928] env[62503]: DEBUG oslo_vmware.api [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388250, 'name': PowerOnVM_Task, 'duration_secs': 0.482761} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.583217] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.583427] env[62503]: INFO nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1085.583622] env[62503]: DEBUG nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1085.584390] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235984a7-b88d-41b2-a06a-6028b23d52f6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.938012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.938556] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1086.101503] env[62503]: INFO nova.compute.manager [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Took 12.37 seconds to build instance. [ 1086.444055] env[62503]: DEBUG nova.compute.utils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1086.445496] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1086.445675] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.483154] env[62503]: DEBUG nova.policy [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d1fa794892747598a9c0b50bfd82581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12a42517cf8f4ad3836f2f95e8833dd4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1086.603993] env[62503]: DEBUG oslo_concurrency.lockutils [None req-88808556-2e15-4009-847b-22734c7efba7 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.879s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.947101] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Successfully created port: 5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.949296] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1087.166990] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.167283] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Cleaning up deleted instances with incomplete migration {{(pid=62503) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11562}} [ 1087.328733] env[62503]: DEBUG nova.compute.manager [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-changed-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1087.328942] env[62503]: DEBUG nova.compute.manager [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing instance network info cache due to event network-changed-0a00591c-6583-407d-bf50-60a53719508b. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1087.329172] env[62503]: DEBUG oslo_concurrency.lockutils [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.329323] env[62503]: DEBUG oslo_concurrency.lockutils [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.329483] env[62503]: DEBUG nova.network.neutron [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing network info cache for port 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1087.961681] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1087.988377] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1087.988802] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1087.989096] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.989393] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1087.989625] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.989857] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1087.990173] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1087.990473] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1087.990699] 
env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1087.990940] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1087.991211] env[62503]: DEBUG nova.virt.hardware [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1087.992389] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50db6e4c-503b-4e4d-af9b-9aecb7e792b0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.004442] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b85d7e-282e-47f4-a7b1-b526347d4536 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.066020] env[62503]: DEBUG nova.network.neutron [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updated VIF entry in instance network info cache for port 0a00591c-6583-407d-bf50-60a53719508b. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.066519] env[62503]: DEBUG nova.network.neutron [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.569555] env[62503]: DEBUG oslo_concurrency.lockutils [req-c30356ea-84a1-4162-9058-ff67c5354e7a req-8e6da210-0499-4a72-bbc1-3d26a85fe26f service nova] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.590814] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Successfully updated port: 5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.671984] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.671984] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Cleaning up deleted instances {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11524}} [ 1089.093550] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.093889] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock 
"refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.093889] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.183908] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] There are 37 instances to clean {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11533}} [ 1089.184198] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 5fb35ae6-bfc6-4039-aa43-de8c550aacde] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1089.359191] env[62503]: DEBUG nova.compute.manager [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Received event network-vif-plugged-5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1089.359443] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.361084] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.361289] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.361472] env[62503]: DEBUG nova.compute.manager [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] No waiting events found dispatching network-vif-plugged-5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.361648] env[62503]: WARNING nova.compute.manager [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Received unexpected event network-vif-plugged-5a42bd1b-f70d-4aba-8070-8636cad420fc for instance with vm_state building and task_state spawning. 
[ 1089.361818] env[62503]: DEBUG nova.compute.manager [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Received event network-changed-5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1089.362008] env[62503]: DEBUG nova.compute.manager [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Refreshing instance network info cache due to event network-changed-5a42bd1b-f70d-4aba-8070-8636cad420fc. {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1089.362195] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Acquiring lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.630521] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1089.688104] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: d811353d-a484-4c15-abfa-3ebbd37816fc] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1089.774793] env[62503]: DEBUG nova.network.neutron [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating instance_info_cache with network_info: [{"id": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "address": "fa:16:3e:48:43:03", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a42bd1b-f7", "ovs_interfaceid": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.191504] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: dd6341e2-cd68-4d12-80e7-51184d448764] Instance has had 0 of 5 cleanup attempts {{(pid=62503) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1090.277439] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.277808] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Instance network_info: |[{"id": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "address": "fa:16:3e:48:43:03", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a42bd1b-f7", "ovs_interfaceid": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1090.278161] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Acquired lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.278365] env[62503]: DEBUG nova.network.neutron [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Refreshing network info cache for port 5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.279550] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:43:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a42bd1b-f70d-4aba-8070-8636cad420fc', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.287268] env[62503]: DEBUG oslo.service.loopingcall [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 
tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.288193] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.288424] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ccdd5fa7-4862-4c23-987c-73c6003c3b16 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.309655] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.309655] env[62503]: value = "task-1388253" [ 1090.309655] env[62503]: _type = "Task" [ 1090.309655] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.317536] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388253, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.695232] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 9a792b9f-51c3-4cef-a3b8-1e81866433ce] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1090.819417] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388253, 'name': CreateVM_Task} progress is 25%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.020617] env[62503]: DEBUG nova.network.neutron [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updated VIF entry in instance network info cache for port 5a42bd1b-f70d-4aba-8070-8636cad420fc. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1091.021049] env[62503]: DEBUG nova.network.neutron [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating instance_info_cache with network_info: [{"id": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "address": "fa:16:3e:48:43:03", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a42bd1b-f7", "ovs_interfaceid": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.198021] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c6961bd3-16fa-4476-9d9c-8e91f7c0bee3] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1091.320610] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388253, 'name': CreateVM_Task, 'duration_secs': 0.994538} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.320807] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.321555] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.321788] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.322207] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.322425] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b868d18-9f48-48c5-a517-914160c17dae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.327391] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1091.327391] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526996c0-f7b1-95c3-7ff7-fa2bcde50b07" [ 1091.327391] env[62503]: _type = "Task" [ 1091.327391] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.335328] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526996c0-f7b1-95c3-7ff7-fa2bcde50b07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.523903] env[62503]: DEBUG oslo_concurrency.lockutils [req-79f0b2c9-ee58-4151-b484-8f9c2eb55fe2 req-24ff2046-1b26-4f03-ab01-2f9d57605174 service nova] Releasing lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.700741] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 81545f5b-a9ba-4d58-aaca-62a2652a8102] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1091.838482] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]526996c0-f7b1-95c3-7ff7-fa2bcde50b07, 'name': SearchDatastore_Task, 'duration_secs': 0.01142} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.838799] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.839053] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.839295] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.839798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.839798] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.839957] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d27ce6db-105a-4372-992e-56f7290fca5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.848038] env[62503]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.848224] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.848915] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de04c645-2bc4-4e52-accc-88818cb87b93 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.854952] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1091.854952] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520a1828-b6b6-d3bb-e734-7a40b99933a8" [ 1091.854952] env[62503]: _type = "Task" [ 1091.854952] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.862025] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520a1828-b6b6-d3bb-e734-7a40b99933a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.204646] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c1a41261-03d3-4dde-9b90-68bdec1a548b] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1092.365772] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]520a1828-b6b6-d3bb-e734-7a40b99933a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008727} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.366545] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fa69f20-46d6-42f6-a9b5-31b3c13a1664 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.371491] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1092.371491] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524bbc48-5700-c1eb-9cf9-c53237b085e8" [ 1092.371491] env[62503]: _type = "Task" [ 1092.371491] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.379279] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524bbc48-5700-c1eb-9cf9-c53237b085e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.708084] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: ccc542a3-ff01-42ca-965e-706bed4c6e07] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1092.882679] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]524bbc48-5700-c1eb-9cf9-c53237b085e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011445} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.882912] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.883168] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 477b21f4-cac1-48f1-862a-0b283b336d72/477b21f4-cac1-48f1-862a-0b283b336d72.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.883433] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-304bec4c-ed54-49b1-a0e6-50a625cd6469 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.890526] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1092.890526] env[62503]: value = "task-1388255" [ 1092.890526] env[62503]: _type = "Task" [ 1092.890526] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.898059] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388255, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.212033] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b120b29f-0e26-465f-bc6f-4214525ae2de] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1093.400881] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388255, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.715982] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: a4e7ec7f-82d1-4fd8-b9c4-bf869aa0968d] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1093.901246] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513119} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.901517] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 477b21f4-cac1-48f1-862a-0b283b336d72/477b21f4-cac1-48f1-862a-0b283b336d72.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.901741] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.901995] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11db38c2-12ae-4acd-8f5a-1f9b64eb72c7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.909174] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1093.909174] env[62503]: value = "task-1388257" [ 1093.909174] env[62503]: _type = "Task" [ 1093.909174] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.918113] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388257, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.219649] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: bba6c92b-cac3-4677-a8f4-57a2704fc685] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1094.419645] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070794} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.419926] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1094.420723] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8782ccc-7faa-4d3b-954c-bb93c7f3b3e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.443228] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 477b21f4-cac1-48f1-862a-0b283b336d72/477b21f4-cac1-48f1-862a-0b283b336d72.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.443494] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53fba0f1-3c2f-4413-946b-9940b9b702ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.463330] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1094.463330] env[62503]: value = "task-1388258" [ 1094.463330] env[62503]: _type = "Task" [ 1094.463330] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.471042] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388258, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.723681] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 4d7f62b3-42d0-4f98-bac4-541f116c9709] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1094.973431] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.227021] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 12dff44c-ebb9-4fa3-8396-defcdb474152] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1095.473311] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388258, 'name': ReconfigVM_Task, 'duration_secs': 0.751428} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.473606] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 477b21f4-cac1-48f1-862a-0b283b336d72/477b21f4-cac1-48f1-862a-0b283b336d72.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.474271] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95707722-473f-4577-84ad-639d4afce0f0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.480930] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1095.480930] env[62503]: value = "task-1388260" [ 1095.480930] env[62503]: _type = "Task" [ 1095.480930] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.488535] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388260, 'name': Rename_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.729998] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 7eb8e049-dd65-43bd-829a-8f773f7ad156] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1095.990920] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388260, 'name': Rename_Task, 'duration_secs': 0.139614} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.993243] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1095.993243] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d97ba3e-3016-4410-bbac-ae85c30f25dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.998450] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1095.998450] env[62503]: value = "task-1388261" [ 1095.998450] env[62503]: _type = "Task" [ 1095.998450] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.005720] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.233333] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: ce9c4d77-6cbe-411d-a0fd-d77a6d6f36b7] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1096.509165] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388261, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.736796] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 68f0c60d-ceff-4d7a-b81d-4845b4c5134c] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1097.008785] env[62503]: DEBUG oslo_vmware.api [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388261, 'name': PowerOnVM_Task, 'duration_secs': 1.005399} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.009017] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.009236] env[62503]: INFO nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Took 9.05 seconds to spawn the instance on the hypervisor. [ 1097.009421] env[62503]: DEBUG nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1097.010213] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a521f386-78bf-4c49-8efc-08a303265824 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.240295] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e0df0ce5-1e88-4a39-8911-529b235f5b88] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1097.527495] env[62503]: INFO nova.compute.manager [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Took 13.77 seconds to build instance. [ 1097.744498] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 5ba614a3-17be-4069-8219-f88f4d27aab9] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1098.030732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-ec5c9272-eebd-4f44-b746-8bb6fbced38f tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.277s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.133246] env[62503]: DEBUG nova.compute.manager [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Received event network-changed-5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1098.133448] env[62503]: DEBUG nova.compute.manager [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Refreshing instance network info cache due to event network-changed-5a42bd1b-f70d-4aba-8070-8636cad420fc. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1098.134025] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] Acquiring lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.134025] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] Acquired lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.134025] env[62503]: DEBUG nova.network.neutron [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Refreshing network info cache for port 5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.248789] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 09ff6008-f1eb-4ee0-af7d-c7f8268e3eb9] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1098.605514] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.605760] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.752108] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 35bd28b5-101e-429f-8487-fbe5bf3528fb] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1098.855660] env[62503]: DEBUG nova.network.neutron [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updated VIF entry in instance network info cache for port 5a42bd1b-f70d-4aba-8070-8636cad420fc. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.856070] env[62503]: DEBUG nova.network.neutron [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating instance_info_cache with network_info: [{"id": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "address": "fa:16:3e:48:43:03", "network": {"id": "ef338e2b-4aa8-4605-8748-910d0d3a3079", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-477257305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12a42517cf8f4ad3836f2f95e8833dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a42bd1b-f7", "ovs_interfaceid": "5a42bd1b-f70d-4aba-8070-8636cad420fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.107785] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1099.255670] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c02c01b0-6f69-424f-8c0f-5a5c9ce1fad7] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1099.358461] env[62503]: DEBUG oslo_concurrency.lockutils [req-9a5d89e3-6ff8-4a8a-83b0-c3f88fdb2b6d req-40398ed8-a341-457c-a9c3-359be725b82c service nova] Releasing lock "refresh_cache-477b21f4-cac1-48f1-862a-0b283b336d72" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.744541] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.744813] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.746421] env[62503]: INFO nova.compute.claims [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.758365] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 04054a79-70b6-409a-981f-6bf99fc3b4fc] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1100.261497] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e7556915-634f-40d6-9e7f-da1c3201d8e4] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1100.765272] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c09488ed-e354-4abf-8999-b2f8afec44fc] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1100.838012] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a12e46a-ea25-4140-b2ae-c8a24a5877ee {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.846349] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ac0b08-9b5c-4761-b74b-c56fb5065f9b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.879457] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67ab663-b5a0-49a4-b11c-eb33b1c46d1b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.887486] 
env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa8a1f1-f994-42cf-81b8-8aced9921b92 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.900964] env[62503]: DEBUG nova.compute.provider_tree [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.270210] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 7b8c670d-3f2a-431d-91da-4ced781e6e51] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1101.404235] env[62503]: DEBUG nova.scheduler.client.report [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1101.774076] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c39e7ee3-1b97-44ec-92d6-733976c0f0f8] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1101.908646] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.909066] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Start building networks asynchronously for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1102.277203] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 629054bb-8fdb-45a2-8c07-216c4104d4a6] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1102.415239] env[62503]: DEBUG nova.compute.utils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1102.416985] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Allocating IP information in the background. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1102.416985] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] allocate_for_instance() {{(pid=62503) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1102.465817] env[62503]: DEBUG nova.policy [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b076e358f78e4874876f90d96fd612e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e818e5ee9dc24efa96747c9558514a15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62503) authorize /opt/stack/nova/nova/policy.py:201}} [ 1102.731548] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Successfully created port: 05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.780318] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: ca1f1966-bfe1-495e-b055-f72150f72470] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1102.919725] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Start building block device mappings for instance. 
{{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1103.283216] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: c9129f68-c755-4b78-b067-b77b01048c02] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1103.425815] env[62503]: INFO nova.virt.block_device [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Booting with volume 70581b1b-a294-4141-b6cc-480701c30979 at /dev/sda [ 1103.461628] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-163393fb-ec4e-448f-89d2-b7d319b2de20 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.472119] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68c4eb4-1011-4c4b-ade6-331d513e71cc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.507129] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d3360ea-d9d3-447b-a8f6-bed6ee484c8a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.518412] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cab161-63e9-4d33-a2d3-2871619317d8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.554078] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7566337a-4b05-4926-acda-f1ed32b32cbe {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.560851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032ccb0d-d6b0-4020-9c29-88d28821c3df {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.578047] env[62503]: DEBUG nova.virt.block_device [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating existing volume attachment record: e0480376-bcbf-4256-aa85-786ca0c55cda {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1103.787091] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: e693bcc2-3883-466d-913c-831146ca81e7] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1104.125787] env[62503]: DEBUG nova.compute.manager [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Received event network-vif-plugged-05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1104.126069] env[62503]: DEBUG oslo_concurrency.lockutils [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] Acquiring lock 
"b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.126257] env[62503]: DEBUG oslo_concurrency.lockutils [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.126465] env[62503]: DEBUG oslo_concurrency.lockutils [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.126592] env[62503]: DEBUG nova.compute.manager [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] No waiting events found dispatching network-vif-plugged-05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1104.126754] env[62503]: WARNING nova.compute.manager [req-2ffedaed-6b8f-440f-bbd3-68b2fadf8e1a req-50cd70a1-1239-47ab-8575-1e391cf7e203 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Received unexpected event network-vif-plugged-05ae8546-f74a-4c0e-8dcf-be609104f0ba for instance with vm_state building and task_state block_device_mapping. [ 1104.203457] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Successfully updated port: 05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1104.290871] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: cf611345-d276-4745-a2f8-0551c9dca2c2] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1104.708638] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.708824] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.708955] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.794853] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 6229dda6-90e8-457b-beb3-2107e3700b29] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1105.239040] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1105.298466] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 9ccdc727-536e-4db8-bad4-960858254758] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1105.375441] env[62503]: DEBUG nova.network.neutron [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.656898] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1105.657520] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1105.657756] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1105.657924] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.658131] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1105.658283] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.658435] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1105.658641] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1105.658805] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1105.658981] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1105.659163] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1105.659340] env[62503]: DEBUG nova.virt.hardware [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1105.660228] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea4dccd-750e-4524-a2c3-523855aed293 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.669344] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0fa0ac-885f-4f04-9220-21f3a95b227b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.802023] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: ef92e4ba-4ef3-4e26-9577-bad0c046ed47] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1105.877980] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.878326] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Instance network_info: |[{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1999}} [ 1105.878760] 
env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:ae:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05ae8546-f74a-4c0e-8dcf-be609104f0ba', 'vif_model': 'vmxnet3'}] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.886271] env[62503]: DEBUG oslo.service.loopingcall [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.886478] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.886701] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d378857-858c-43d3-af75-dea094518884 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.908374] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.908374] env[62503]: value = "task-1388262" [ 1105.908374] env[62503]: _type = "Task" [ 1105.908374] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.916266] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388262, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.151961] env[62503]: DEBUG nova.compute.manager [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Received event network-changed-05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1106.152197] env[62503]: DEBUG nova.compute.manager [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Refreshing instance network info cache due to event network-changed-05ae8546-f74a-4c0e-8dcf-be609104f0ba. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1106.152434] env[62503]: DEBUG oslo_concurrency.lockutils [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] Acquiring lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.152608] env[62503]: DEBUG oslo_concurrency.lockutils [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.152835] env[62503]: DEBUG nova.network.neutron [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Refreshing network info cache for port 05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.305387] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 86422990-4215-4628-a7a7-4fdc910e304e] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1106.419016] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388262, 'name': CreateVM_Task, 'duration_secs': 0.31457} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.419188] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1106.419870] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'delete_on_termination': True, 'device_type': None, 'guest_format': None, 'attachment_id': 'e0480376-bcbf-4256-aa85-786ca0c55cda', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294655', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'name': 'volume-70581b1b-a294-4141-b6cc-480701c30979', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f', 'attached_at': '', 'detached_at': '', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'serial': '70581b1b-a294-4141-b6cc-480701c30979'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62503) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1106.420101] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Root volume attach. 
Driver type: vmdk {{(pid=62503) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1106.420885] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa298eaa-0fe1-4d2d-80de-a33e933ea447 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.427885] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc99880-e36d-4171-a9b1-bdbfbd9f888c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.433462] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3f0f82-fa67-4c62-bb9c-4f83a5051a9d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.438903] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ce23ec1b-1e70-4baf-a2da-e2eb5f2210c5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.445439] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1106.445439] env[62503]: value = "task-1388263" [ 1106.445439] env[62503]: _type = "Task" [ 1106.445439] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.452545] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388263, 'name': RelocateVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.808488] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: d4990c66-63d5-43b0-8187-2074c99ccde2] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1106.842749] env[62503]: DEBUG nova.network.neutron [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updated VIF entry in instance network info cache for port 05ae8546-f74a-4c0e-8dcf-be609104f0ba. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.843139] env[62503]: DEBUG nova.network.neutron [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.956017] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388263, 'name': RelocateVM_Task, 'duration_secs': 0.346057} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.956282] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Volume attach. 
Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1106.956593] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294655', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'name': 'volume-70581b1b-a294-4141-b6cc-480701c30979', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f', 'attached_at': '', 'detached_at': '', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'serial': '70581b1b-a294-4141-b6cc-480701c30979'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1106.957348] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9f6a3e-ab67-4ed6-ba76-108f812b7153 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.972617] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c111d01-f5ab-4d51-be23-9841f398bd95 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.993703] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-70581b1b-a294-4141-b6cc-480701c30979/volume-70581b1b-a294-4141-b6cc-480701c30979.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.993959] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5309a692-21d7-4e50-b9ee-aa0ab5748757 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.014954] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1107.014954] env[62503]: value = "task-1388264" [ 1107.014954] env[62503]: _type = "Task" [ 1107.014954] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.022039] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388264, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.314090] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 141d7d04-0267-4e15-90ed-112ac8fb8c9b] Instance has had 0 of 5 cleanup attempts {{(pid=62503) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11537}} [ 1107.345728] env[62503]: DEBUG oslo_concurrency.lockutils [req-ac0e223d-c91d-46d5-ace0-043d262f7790 req-f3685804-f98c-4dd8-aba8-782cf6bfe101 service nova] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.525575] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388264, 'name': ReconfigVM_Task, 'duration_secs': 0.294979} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.525641] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-70581b1b-a294-4141-b6cc-480701c30979/volume-70581b1b-a294-4141-b6cc-480701c30979.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.530182] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53157dbf-8d09-4b20-9d99-3ff9da5c9ecb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.545888] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1107.545888] env[62503]: value = "task-1388265" [ 1107.545888] env[62503]: _type = "Task" [ 1107.545888] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.554066] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.817218] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.055716] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388265, 'name': ReconfigVM_Task, 'duration_secs': 0.133803} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.056024] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294655', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'name': 'volume-70581b1b-a294-4141-b6cc-480701c30979', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f', 'attached_at': '', 'detached_at': '', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'serial': '70581b1b-a294-4141-b6cc-480701c30979'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1108.056538] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9472296a-705c-4905-927d-23f33f4340e4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.064061] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1108.064061] env[62503]: value = "task-1388266" [ 1108.064061] env[62503]: _type = "Task" [ 1108.064061] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.071834] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388266, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.574329] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388266, 'name': Rename_Task, 'duration_secs': 0.139511} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.574609] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1108.574848] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5ce6264-c96b-4f36-9c56-4138c8fab22f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.581918] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1108.581918] env[62503]: value = "task-1388267" [ 1108.581918] env[62503]: _type = "Task" [ 1108.581918] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.588967] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388267, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.092291] env[62503]: DEBUG oslo_vmware.api [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388267, 'name': PowerOnVM_Task, 'duration_secs': 0.475728} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.092531] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.092762] env[62503]: INFO nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Took 3.44 seconds to spawn the instance on the hypervisor. [ 1109.092961] env[62503]: DEBUG nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1109.093745] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65124b6-b34a-4262-8196-1d072570ed19 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.613038] env[62503]: INFO nova.compute.manager [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Took 10.00 seconds to build instance. 
[ 1109.816297] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.816524] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.115155] env[62503]: DEBUG oslo_concurrency.lockutils [None req-e12ff00d-4a58-4323-bd46-d846d246e790 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.509s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.322172] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.322331] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 1110.323321] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Rebuilding the list of instances to heal {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10228}} [ 1110.847951] env[62503]: DEBUG nova.compute.manager [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1110.850113] env[62503]: DEBUG nova.compute.manager [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing instance network info cache due to event network-changed-24ac3187-6729-47ea-beb6-4c96018b8a05. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1110.850113] env[62503]: DEBUG oslo_concurrency.lockutils [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.850113] env[62503]: DEBUG oslo_concurrency.lockutils [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.850113] env[62503]: DEBUG nova.network.neutron [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Refreshing network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.858119] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.529952] env[62503]: DEBUG nova.network.neutron [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updated VIF entry in instance network info cache for port 24ac3187-6729-47ea-beb6-4c96018b8a05. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1111.530363] env[62503]: DEBUG nova.network.neutron [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.033569] env[62503]: DEBUG oslo_concurrency.lockutils [req-9fccfd88-e00f-4817-bade-e52eb525faa7 req-67a1a14b-9614-48f6-ac1b-9ed2fcd32dbc service nova] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.033961] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquired lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.034136] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Forcefully refreshing network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1112.034332] env[62503]: DEBUG nova.objects.instance [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lazy-loading 'info_cache' on Instance uuid b6fddb0d-70f5-433f-a0ef-0d6bffb35579 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.237563] env[62503]: DEBUG nova.compute.manager [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Stashing vm_state: active {{(pid=62503) _prep_resize /opt/stack/nova/nova/compute/manager.py:5920}} [ 1112.758143] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.758440] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.872580] env[62503]: DEBUG nova.compute.manager [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Received event network-changed-05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1112.872849] env[62503]: DEBUG nova.compute.manager [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Refreshing instance network info cache due to event network-changed-05ae8546-f74a-4c0e-8dcf-be609104f0ba. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1112.873097] env[62503]: DEBUG oslo_concurrency.lockutils [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] Acquiring lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.873259] env[62503]: DEBUG oslo_concurrency.lockutils [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.873420] env[62503]: DEBUG nova.network.neutron [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Refreshing network info cache for port 05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.266921] env[62503]: INFO nova.compute.claims [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1113.565684] env[62503]: DEBUG nova.network.neutron [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updated VIF entry in instance network info cache for port 05ae8546-f74a-4c0e-8dcf-be609104f0ba. {{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.566066] env[62503]: DEBUG nova.network.neutron [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.723500] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 
b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [{"id": "24ac3187-6729-47ea-beb6-4c96018b8a05", "address": "fa:16:3e:f9:f1:4f", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24ac3187-67", "ovs_interfaceid": "24ac3187-6729-47ea-beb6-4c96018b8a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.772261] env[62503]: INFO nova.compute.resource_tracker [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating resource usage from migration 7a9474f6-b376-40f4-bfbf-18c7abb46a95 [ 1113.868859] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1896c084-5a85-43fa-9968-ca7974723bdd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.876795] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca930525-cbb0-48b9-916c-9e111cfd2cc7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.905792] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da76c700-24a3-46f5-9737-f29e96c03a97 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.912590] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be7aa61-22bc-41b9-919f-4848a96e060a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.925484] env[62503]: DEBUG nova.compute.provider_tree [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.069129] env[62503]: DEBUG oslo_concurrency.lockutils [req-2fa1b9a6-768c-4615-ba14-651652ca54a8 req-bb8852e4-def6-4f44-9aef-634e488c2b1e service nova] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.226549] env[62503]: DEBUG 
oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Releasing lock "refresh_cache-b6fddb0d-70f5-433f-a0ef-0d6bffb35579" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.226748] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updated the network info_cache for instance {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10295}} [ 1114.226962] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.227137] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.227288] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.227439] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.227583] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.227724] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.428779] env[62503]: DEBUG nova.scheduler.client.report [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1114.731667] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Getting list of instances from cluster (obj){ [ 1114.731667] env[62503]: value = "domain-c8" [ 1114.731667] env[62503]: _type = "ClusterComputeResource" [ 1114.731667] env[62503]: } {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1114.732738] env[62503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa36350-55d4-483d-a59a-66752618b152 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.750412] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Got total of 7 instances {{(pid=62503) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1114.750578] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid b6fddb0d-70f5-433f-a0ef-0d6bffb35579 {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.750781] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid 987b6101-565e-4eb2-b8af-f9afd5be38ce {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.750932] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid 485d3aba-6c0d-46c7-860b-c0dbd9c16498 {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.751096] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid 0ae63db4-6856-46d8-afa9-876b17152859 {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.751251] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.751395] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid 477b21f4-cac1-48f1-862a-0b283b336d72 {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.751547] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Triggering sync for uuid b7c00736-b0c7-4f69-b47b-f904f84a8a2f {{(pid=62503) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10634}} [ 1114.751862] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.752096] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.752365] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.752550] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.752804] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.752999] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.753249] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "0ae63db4-6856-46d8-afa9-876b17152859" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.753431] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "0ae63db4-6856-46d8-afa9-876b17152859" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.753651] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.753840] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.754092] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.754279] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.754504] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.754698] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.754867] env[62503]: INFO nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] During sync_power_state the instance has a pending task (resize_prep). Skip. [ 1114.755045] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.755233] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.755365] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 1114.756072] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369c8eb4-5d3d-4f44-8803-4016b7cc3fe0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.758966] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824a8bbd-9dd3-4f8e-b741-7c6fabf18285 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.761531] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104c0963-7925-4be3-bc99-ee53f0b959b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.764180] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd5bb28-78ef-4566-a4df-361f8d978fae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.766661] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fdd1ca-44d5-4193-a69e-bc2a68f436aa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.770126] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5d30a2-d9a1-4d8f-97a1-ba0dc6ce4020 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.772590] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic 
task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.933316] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.175s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.933557] env[62503]: INFO nova.compute.manager [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Migrating [ 1115.277845] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.278149] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.278387] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.278606] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1115.279824] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5240bd0b-7fdd-425a-b45b-2aefd386df83 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.291008] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17cf28e-f6fc-4727-a0e9-3f90ab7885bf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.296345] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "0ae63db4-6856-46d8-afa9-876b17152859" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.296758] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1115.297162] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.544s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.310354] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.556s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.310749] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.311158] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.312227] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ec7c44-c442-4558-869a-f63d85564832 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.320089] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cc4bcd-cf51-4c73-849f-00bb48dd20fb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.362391] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180640MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1115.362596] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.362857] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.448189] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.448536] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.448579] env[62503]: DEBUG nova.network.neutron [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.348711] env[62503]: DEBUG nova.network.neutron [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.372137] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Applying migration context for instance b7c00736-b0c7-4f69-b47b-f904f84a8a2f as it has an incoming, in-progress migration 7a9474f6-b376-40f4-bfbf-18c7abb46a95. Migration status is pre-migrating {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1116.373227] env[62503]: INFO nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating resource usage from migration 7a9474f6-b376-40f4-bfbf-18c7abb46a95 [ 1116.388343] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.388493] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 987b6101-565e-4eb2-b8af-f9afd5be38ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.388617] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 485d3aba-6c0d-46c7-860b-c0dbd9c16498 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.388735] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 0ae63db4-6856-46d8-afa9-876b17152859 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.388852] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.388966] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 477b21f4-cac1-48f1-862a-0b283b336d72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.389100] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Migration 7a9474f6-b376-40f4-bfbf-18c7abb46a95 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1116.389222] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b7c00736-b0c7-4f69-b47b-f904f84a8a2f actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.389407] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1116.389546] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1116.484189] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ef5e54-88c2-46ec-b83a-60b067a8ed11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.492869] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a21e55-1394-4eec-9e29-e0b56cf6a8c8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.523477] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ec6e9d-bc7f-4eda-b344-f39de19d5773 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.531263] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3e9d7e-a610-4242-9b1d-656301b9f28a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.544339] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.852103] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.047370] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1117.551602] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1117.551973] env[62503]: DEBUG 
oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.189s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.367138] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed44e5a-f9d1-4d1c-9ed4-5a0ce9521a84 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.386280] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 0 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.892350] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1118.892661] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95b6f563-0144-4234-89a5-b227a586b8d9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.900593] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1118.900593] env[62503]: value = "task-1388268" [ 1118.900593] env[62503]: _type = "Task" [ 1118.900593] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.908779] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388268, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.410668] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388268, 'name': PowerOffVM_Task, 'duration_secs': 0.303752} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.410930] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1119.411139] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 17 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1119.917746] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1119.918045] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1119.918201] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.918402] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1119.918550] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.918707] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1119.918925] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1119.919113] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1119.919295] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1119.919465] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1119.919646] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1119.924837] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-387a150e-d9b9-49f1-8f44-a106141883d3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.941848] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1119.941848] env[62503]: value = "task-1388269" [ 1119.941848] env[62503]: _type = "Task" [ 1119.941848] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.954629] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388269, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.454057] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388269, 'name': ReconfigVM_Task, 'duration_secs': 0.197904} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.454057] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 33 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1120.959317] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1120.959590] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1120.959744] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.959932] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1120.960098] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.960254] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1120.960462] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1120.960626] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1120.960806] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1120.961016] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1120.961210] env[62503]: DEBUG nova.virt.hardware [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1120.966581] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1120.966879] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aed77820-e4de-461e-918b-5d2551d5f10d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.986979] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1120.986979] env[62503]: value = "task-1388270" [ 1120.986979] env[62503]: _type = "Task" [ 1120.986979] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.994919] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.497580] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388270, 'name': ReconfigVM_Task, 'duration_secs': 0.159411} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.497853] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1121.498619] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96781a4d-0f39-466e-aaef-dc198c0c4703 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.520022] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-70581b1b-a294-4141-b6cc-480701c30979/volume-70581b1b-a294-4141-b6cc-480701c30979.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.520572] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0461241e-a7e5-401d-b0e0-1a72ec92ec3e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.538049] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1121.538049] env[62503]: value = "task-1388271" [ 1121.538049] env[62503]: _type = "Task" [ 1121.538049] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.545531] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.047892] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388271, 'name': ReconfigVM_Task, 'duration_secs': 0.238564} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.048223] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-70581b1b-a294-4141-b6cc-480701c30979/volume-70581b1b-a294-4141-b6cc-480701c30979.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.048415] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 50 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.555517] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf820e9c-4ebc-4160-802c-0dab77d1264c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.574149] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df22a04b-fe2a-47ee-8c8c-e68cbee46835 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.590590] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 67 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.203537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.203787] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.230681] env[62503]: DEBUG nova.network.neutron [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Port 05ae8546-f74a-4c0e-8dcf-be609104f0ba binding to destination host cpu-1 is already ACTIVE {{(pid=62503) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1124.707035] env[62503]: DEBUG nova.compute.utils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 
tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1125.209888] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.250498] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.250725] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.250911] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.268176] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.268518] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.268653] env[62503]: INFO nova.compute.manager [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Attaching volume 0596d168-bfef-4df2-ac6e-ab24dec409e8 to /dev/sdb [ 1126.286672] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock 
"refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.286856] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.287047] env[62503]: DEBUG nova.network.neutron [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.299719] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b43f87-e545-4f87-9c35-8a4b1e96f6f8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.306855] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f8f546-d030-4546-8de8-7799805e0e61 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.320177] env[62503]: DEBUG nova.virt.block_device [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating existing volume attachment record: 206ff959-6453-45d0-a2ff-31c29b240078 {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1126.999100] env[62503]: DEBUG nova.network.neutron [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.502396] env[62503]: 
DEBUG oslo_concurrency.lockutils [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.011957] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5b35ed-e9ef-4ca7-b9a6-25f6f72741a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.018943] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67011735-d112-4ffe-9098-a2aa20478832 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.111833] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1669c78-9867-49c0-97a8-a246df3de79a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.130603] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b463d94c-598b-4c33-acff-22f53c96feea {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.137328] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 83 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.643882] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.644227] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e4fd1d2-70e9-44d7-81e7-427172728da3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.651421] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1129.651421] env[62503]: value = "task-1388276" [ 1129.651421] env[62503]: _type = "Task" [ 1129.651421] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.659204] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388276, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.161265] env[62503]: DEBUG oslo_vmware.api [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388276, 'name': PowerOnVM_Task, 'duration_secs': 0.394093} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.161603] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.161699] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d197da2c-aa07-414e-a237-052bd700981c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f' progress to 100 {{(pid=62503) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1130.862663] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Volume attach. Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1130.862949] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294659', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'name': 'volume-0596d168-bfef-4df2-ac6e-ab24dec409e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94e69c2d-bf7a-42a8-a063-62ad1bb7f927', 'attached_at': '', 'detached_at': '', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'serial': '0596d168-bfef-4df2-ac6e-ab24dec409e8'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1130.863847] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce31e47-e03f-4a01-8485-1c6f956d0160 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.880823] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58510568-fd09-4ef5-bc54-eeedc64644c2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.903755] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-0596d168-bfef-4df2-ac6e-ab24dec409e8/volume-0596d168-bfef-4df2-ac6e-ab24dec409e8.vmdk or device None with type thin {{(pid=62503) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.904020] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8c63ce3-753e-4ec6-9cfd-16aca38ceddc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.920276] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1130.920276] env[62503]: value = "task-1388277" [ 1130.920276] env[62503]: _type = "Task" [ 1130.920276] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.927536] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388277, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.429724] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388277, 'name': ReconfigVM_Task, 'duration_secs': 0.335756} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.430112] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-0596d168-bfef-4df2-ac6e-ab24dec409e8/volume-0596d168-bfef-4df2-ac6e-ab24dec409e8.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.434814] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2993220-7ac6-4d6e-9a5c-972eed906be5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.448527] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1131.448527] env[62503]: value = "task-1388278" [ 1131.448527] env[62503]: _type = "Task" [ 1131.448527] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.455578] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388278, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.958647] env[62503]: DEBUG oslo_vmware.api [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388278, 'name': ReconfigVM_Task, 'duration_secs': 0.128547} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.959039] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294659', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'name': 'volume-0596d168-bfef-4df2-ac6e-ab24dec409e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94e69c2d-bf7a-42a8-a063-62ad1bb7f927', 'attached_at': '', 'detached_at': '', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'serial': '0596d168-bfef-4df2-ac6e-ab24dec409e8'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1132.830384] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.830755] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.830935] env[62503]: DEBUG nova.compute.manager [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Going to confirm migration 3 {{(pid=62503) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5079}} [ 1132.995661] env[62503]: DEBUG nova.objects.instance [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'flavor' on Instance uuid 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.391049] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.391261] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] Acquired lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.391441] env[62503]: DEBUG nova.network.neutron [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.391628] env[62503]: DEBUG nova.objects.instance [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'info_cache' on Instance uuid b7c00736-b0c7-4f69-b47b-f904f84a8a2f {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.500989] env[62503]: DEBUG oslo_concurrency.lockutils [None req-46a133d7-3f23-49ea-80ef-1120a4645a16 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.618688] env[62503]: INFO nova.compute.manager [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Rescuing [ 1133.618984] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.619183] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.619710] env[62503]: DEBUG nova.network.neutron [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.393278] env[62503]: DEBUG nova.network.neutron [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.603872] env[62503]: DEBUG nova.network.neutron [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [{"id": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "address": "fa:16:3e:4e:ae:77", "network": {"id": "1705446b-db63-4b14-9929-b692ca586b36", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1437181237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e818e5ee9dc24efa96747c9558514a15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ae8546-f7", "ovs_interfaceid": "05ae8546-f74a-4c0e-8dcf-be609104f0ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.896389] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.107190] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Releasing lock "refresh_cache-b7c00736-b0c7-4f69-b47b-f904f84a8a2f" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.107460] env[62503]: DEBUG nova.objects.instance [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'migration_context' on Instance uuid b7c00736-b0c7-4f69-b47b-f904f84a8a2f {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.610679] env[62503]: DEBUG nova.objects.base [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62503) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1135.611759] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec2e769-72cb-46bd-a734-06a443025635 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.631134] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f6fb23a-7f44-42af-b250-4f7384af7f21 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.636380] env[62503]: DEBUG oslo_vmware.api [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1135.636380] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206b502-f56f-4016-6cee-c6392fac602b" [ 1135.636380] env[62503]: _type = "Task" [ 1135.636380] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.643990] env[62503]: DEBUG oslo_vmware.api [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206b502-f56f-4016-6cee-c6392fac602b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.930769] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.931161] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d63377b-1dc8-4a37-a146-dd5b45a7c18d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.939113] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1135.939113] env[62503]: value = "task-1388279" [ 1135.939113] env[62503]: _type = "Task" [ 1135.939113] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.946634] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388279, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.146497] env[62503]: DEBUG oslo_vmware.api [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206b502-f56f-4016-6cee-c6392fac602b, 'name': SearchDatastore_Task, 'duration_secs': 0.009303} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.146791] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.147069] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.448748] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388279, 'name': PowerOffVM_Task, 'duration_secs': 0.172283} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.449027] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.449810] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691131ff-b982-4247-a4ba-9796e2e1e730 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.469476] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579e9b81-e1ca-4f41-9580-06e7bc8b0cd7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.496013] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.496363] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b3899d5-2325-4902-a205-2ee417cab44b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.502731] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 
tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1136.502731] env[62503]: value = "task-1388280" [ 1136.502731] env[62503]: _type = "Task" [ 1136.502731] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.513024] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] VM already powered off {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1136.513024] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.513266] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.514225] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.514225] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.514225] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbc73be8-6551-422e-8e29-76a207a35c33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.521291] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.521475] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.522169] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22215a6c-7d69-4255-a0d7-97f8c1a0b815 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.527051] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1136.527051] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d13f13-9e5d-b1ee-cd50-5e52c127d2f7" [ 1136.527051] env[62503]: _type = "Task" [ 1136.527051] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.533935] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d13f13-9e5d-b1ee-cd50-5e52c127d2f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.754930] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99b47d6-cca8-401c-95dc-3a4738b6c8a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.762256] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9a37a5-ae32-4f83-86ce-c8c1d5540f5b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.791922] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1495fd98-b9d8-4e38-87c1-e1859fd2b48c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.798657] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f543ada8-c799-4a68-bdc8-830ab7a46b1f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.811778] env[62503]: DEBUG nova.compute.provider_tree [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.037356] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d13f13-9e5d-b1ee-cd50-5e52c127d2f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009211} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.038071] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b190ac20-1977-4e9d-ae20-f49bca441c4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.042963] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1137.042963] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522621dd-1a3c-88ea-e56d-433a6858c7d6" [ 1137.042963] env[62503]: _type = "Task" [ 1137.042963] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.050493] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522621dd-1a3c-88ea-e56d-433a6858c7d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.314953] env[62503]: DEBUG nova.scheduler.client.report [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1137.554590] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]522621dd-1a3c-88ea-e56d-433a6858c7d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008624} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.554849] env[62503]: DEBUG oslo_concurrency.lockutils [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.555131] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. 
{{(pid=62503) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1137.555449] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1266b2c3-cc45-46fa-97c9-5884a2c9cb58 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.562040] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1137.562040] env[62503]: value = "task-1388281" [ 1137.562040] env[62503]: _type = "Task" [ 1137.562040] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.569049] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.755876] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.756269] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.073052] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388281, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445737} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.073338] env[62503]: INFO nova.virt.vmwareapi.ds_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk. 
[ 1138.074086] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c14a15f-0664-47a2-abdb-29fa3e14a343 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.100515] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.101056] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8329c63-fc03-4c9f-b3ef-b87da0e96643 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.118389] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1138.118389] env[62503]: value = "task-1388282" [ 1138.118389] env[62503]: _type = "Task" [ 1138.118389] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.125440] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388282, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.259975] env[62503]: DEBUG nova.compute.utils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1138.325486] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.178s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.628172] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388282, 'name': ReconfigVM_Task, 'duration_secs': 0.334803} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.628474] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927/8150ca02-f879-471d-8913-459408f127a1-rescue.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.629371] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb561c4-5c34-4355-b96d-01c4a8eb50a6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.656273] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3846031-3960-4305-87ce-56169d61e36f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.671211] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1138.671211] env[62503]: value = "task-1388283" [ 1138.671211] env[62503]: _type = "Task" [ 1138.671211] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.678758] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388283, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.762791] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.878802] env[62503]: INFO nova.scheduler.client.report [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocation for migration 7a9474f6-b376-40f4-bfbf-18c7abb46a95 [ 1139.181532] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388283, 'name': ReconfigVM_Task, 'duration_secs': 0.157295} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.181822] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.182107] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbb50a9b-dd49-462c-887d-019f72aba7a4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.188255] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1139.188255] env[62503]: value = "task-1388284" [ 1139.188255] env[62503]: _type = "Task" [ 1139.188255] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.196498] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.254753] env[62503]: INFO nova.compute.manager [None req-879965f2-f399-4db1-b827-233ff25e334e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Get console output [ 1139.255139] env[62503]: WARNING nova.virt.vmwareapi.driver [None req-879965f2-f399-4db1-b827-233ff25e334e tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] The console log is missing. Check your VSPC configuration [ 1139.384391] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bf098c1e-32f8-42b7-80fe-560805afcd4c tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.553s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.697909] env[62503]: DEBUG oslo_vmware.api [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388284, 'name': PowerOnVM_Task, 'duration_secs': 0.422843} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.698196] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1139.700898] env[62503]: DEBUG nova.compute.manager [None req-818bf7ee-b7f4-4d0c-b0a6-90cc8f0af9ad tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1139.701655] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deba7d4d-8e2a-4237-93a7-93a7b59ef67e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.825772] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.826157] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.826518] env[62503]: INFO nova.compute.manager [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Attaching volume 41a239b5-982e-4e68-9114-474056b264c6 to /dev/sdb [ 1139.884237] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e73e1e-0de5-4d1f-84c8-53d7f9f9d532 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.891287] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e2bc6e-3583-40de-bfb8-60e1b243baef {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.904185] env[62503]: DEBUG nova.virt.block_device [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating existing volume attachment record: ffbe59ef-aa4a-4357-bd96-b0249c73ea09 {{(pid=62503) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1140.547520] env[62503]: INFO nova.compute.manager [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] 
Unrescuing [ 1140.547822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.547997] env[62503]: DEBUG oslo_concurrency.lockutils [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.548200] env[62503]: DEBUG nova.network.neutron [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.276137] env[62503]: DEBUG nova.network.neutron [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.778732] env[62503]: DEBUG oslo_concurrency.lockutils [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.779482] env[62503]: DEBUG nova.objects.instance [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'flavor' on Instance uuid 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.287785] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06d0767-44b8-48c7-8e70-3f696f292051 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.311508] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.311890] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4686736-86a5-4f1a-9c8b-7278a60bd2cb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.317848] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1142.317848] env[62503]: value = "task-1388287" [ 1142.317848] env[62503]: _type = "Task" [ 1142.317848] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.324849] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388287, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.828380] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388287, 'name': PowerOffVM_Task, 'duration_secs': 0.198961} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.828720] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.833899] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1142.834259] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ce9b4fc-ced0-4647-a11b-b1afea5c8fb8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.851351] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1142.851351] env[62503]: value = "task-1388288" [ 1142.851351] env[62503]: _type = "Task" [ 1142.851351] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.858842] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388288, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.361363] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388288, 'name': ReconfigVM_Task, 'duration_secs': 0.234082} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.361660] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1143.361854] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.362132] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0d8f3b6-604c-4f5e-b1f2-289e6d045865 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.368102] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1143.368102] env[62503]: value = "task-1388289" [ 1143.368102] env[62503]: _type = "Task" [ 1143.368102] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.375166] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.878057] env[62503]: DEBUG oslo_vmware.api [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388289, 'name': PowerOnVM_Task, 'duration_secs': 0.46582} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.878057] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1143.878057] env[62503]: DEBUG nova.compute.manager [None req-81864057-bce3-43e9-b87c-a9cc70046c38 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1143.878667] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dcd85e-53ca-4845-ae0f-e2e03c2cd606 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.448168] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Volume attach. Driver type: vmdk {{(pid=62503) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1144.448435] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294660', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'name': 'volume-41a239b5-982e-4e68-9114-474056b264c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '477b21f4-cac1-48f1-862a-0b283b336d72', 'attached_at': '', 'detached_at': '', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'serial': '41a239b5-982e-4e68-9114-474056b264c6'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1144.449318] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f40aeb-b152-4eea-a2cc-d4043ca632da {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.465348] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b55b95-db34-4ad3-b6b2-7bcfc68d1c08 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.492191] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-41a239b5-982e-4e68-9114-474056b264c6/volume-41a239b5-982e-4e68-9114-474056b264c6.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.493048] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-8bc57234-ee1f-407b-8f5b-cdbcedb10a90 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.511158] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1144.511158] env[62503]: value = "task-1388290" [ 1144.511158] env[62503]: _type = "Task" [ 1144.511158] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.519094] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388290, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.027168] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388290, 'name': ReconfigVM_Task, 'duration_secs': 0.34123} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.027168] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-41a239b5-982e-4e68-9114-474056b264c6/volume-41a239b5-982e-4e68-9114-474056b264c6.vmdk or device None with type thin {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1145.031401] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-776cf1de-8b81-446c-9926-e7420ee1db5e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.042946] env[62503]: DEBUG nova.compute.manager [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-changed-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1145.043186] env[62503]: DEBUG nova.compute.manager [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing instance network info cache due to event network-changed-0a00591c-6583-407d-bf50-60a53719508b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1145.043409] env[62503]: DEBUG oslo_concurrency.lockutils [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.043557] env[62503]: DEBUG oslo_concurrency.lockutils [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.043723] env[62503]: DEBUG nova.network.neutron [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing network info cache for port 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.049853] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1145.049853] env[62503]: value = "task-1388291" [ 1145.049853] env[62503]: _type = "Task" [ 1145.049853] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.058513] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388291, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.559965] env[62503]: DEBUG oslo_vmware.api [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388291, 'name': ReconfigVM_Task, 'duration_secs': 0.140364} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.560248] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294660', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'name': 'volume-41a239b5-982e-4e68-9114-474056b264c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '477b21f4-cac1-48f1-862a-0b283b336d72', 'attached_at': '', 'detached_at': '', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'serial': '41a239b5-982e-4e68-9114-474056b264c6'} {{(pid=62503) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1145.744554] env[62503]: DEBUG nova.network.neutron [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updated VIF entry in instance network info cache for port 0a00591c-6583-407d-bf50-60a53719508b. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.745011] env[62503]: DEBUG nova.network.neutron [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.248141] env[62503]: DEBUG oslo_concurrency.lockutils [req-df075b5b-99a9-47b1-9f57-9d4a947373db req-d628e858-f4ff-4ce9-abf5-b531b8820464 service nova] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.594333] env[62503]: DEBUG nova.objects.instance [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid 477b21f4-cac1-48f1-862a-0b283b336d72 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.068494] env[62503]: DEBUG nova.compute.manager [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-changed-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1147.068704] env[62503]: DEBUG nova.compute.manager [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing instance network info cache due to event network-changed-0a00591c-6583-407d-bf50-60a53719508b. 
{{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11432}} [ 1147.068928] env[62503]: DEBUG oslo_concurrency.lockutils [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] Acquiring lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.069092] env[62503]: DEBUG oslo_concurrency.lockutils [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] Acquired lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.069806] env[62503]: DEBUG nova.network.neutron [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Refreshing network info cache for port 0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.098822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-7521548f-fc3e-4365-819b-172b0a1c7981 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.273s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.240537] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.240799] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.745152] env[62503]: INFO nova.compute.manager [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Detaching volume 41a239b5-982e-4e68-9114-474056b264c6 [ 1147.776926] env[62503]: DEBUG nova.network.neutron [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updated VIF entry in instance network info cache for port 0a00591c-6583-407d-bf50-60a53719508b. 
{{(pid=62503) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1147.777367] env[62503]: DEBUG nova.network.neutron [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [{"id": "0a00591c-6583-407d-bf50-60a53719508b", "address": "fa:16:3e:71:8a:86", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a00591c-65", "ovs_interfaceid": "0a00591c-6583-407d-bf50-60a53719508b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.780753] env[62503]: INFO nova.virt.block_device [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Attempting to driver detach volume 41a239b5-982e-4e68-9114-474056b264c6 from mountpoint /dev/sdb [ 1147.780993] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Volume detach. 
Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1147.781246] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294660', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'name': 'volume-41a239b5-982e-4e68-9114-474056b264c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '477b21f4-cac1-48f1-862a-0b283b336d72', 'attached_at': '', 'detached_at': '', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'serial': '41a239b5-982e-4e68-9114-474056b264c6'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1147.782160] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2939db5f-1375-4589-aa05-5fb295793df4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.804430] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e569ed0-94e7-40c9-843b-a81904bc7107 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.811225] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5cc5f6-d449-41a7-b9fd-1c418ba832c0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.830556] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f0b0e8-b9a0-4676-a23f-8fb8908770fc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.844172] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] The volume has not been displaced from its original location: [datastore1] volume-41a239b5-982e-4e68-9114-474056b264c6/volume-41a239b5-982e-4e68-9114-474056b264c6.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1147.849203] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.849710] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6360c64e-21aa-442e-901d-7f65e18b941f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.866016] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1147.866016] env[62503]: value = "task-1388292" [ 1147.866016] env[62503]: _type = "Task" [ 1147.866016] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.873288] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388292, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.280208] env[62503]: DEBUG oslo_concurrency.lockutils [req-44a8d2ad-4d18-413b-a1b9-23e307b3832e req-5ba8c8a5-a610-4376-9d84-a0d6080913ac service nova] Releasing lock "refresh_cache-94e69c2d-bf7a-42a8-a063-62ad1bb7f927" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.376170] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388292, 'name': ReconfigVM_Task, 'duration_secs': 0.233571} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.376442] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1148.380915] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6341722-eba5-48b2-bdac-b28385e61e01 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.394656] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1148.394656] env[62503]: value = "task-1388293" [ 1148.394656] env[62503]: _type = "Task" [ 1148.394656] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.403401] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.904497] env[62503]: DEBUG oslo_vmware.api [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388293, 'name': ReconfigVM_Task, 'duration_secs': 0.128573} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.904859] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294660', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'name': 'volume-41a239b5-982e-4e68-9114-474056b264c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '477b21f4-cac1-48f1-862a-0b283b336d72', 'attached_at': '', 'detached_at': '', 'volume_id': '41a239b5-982e-4e68-9114-474056b264c6', 'serial': '41a239b5-982e-4e68-9114-474056b264c6'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1149.443276] env[62503]: DEBUG nova.objects.instance [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'flavor' on Instance uuid 477b21f4-cac1-48f1-862a-0b283b336d72 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.450579] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bb42fd04-f126-48d3-87cc-6018ef0deaf4 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.512790] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.513287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.513404] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.513602] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.513782] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.515939] env[62503]: INFO nova.compute.manager [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Terminating instance [ 1151.517593] env[62503]: DEBUG nova.compute.manager [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1151.517793] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.518652] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21acd32c-0e52-4e5c-bbbe-e416aa12d2fd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.526803] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.527326] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7969c441-63e4-428f-98cf-d45b98be66cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.533717] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1151.533717] env[62503]: value = "task-1388294" [ 1151.533717] env[62503]: _type = "Task" [ 1151.533717] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.541343] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388294, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.043717] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388294, 'name': PowerOffVM_Task, 'duration_secs': 0.219645} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.043979] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1152.044201] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.044469] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60b10075-c2e2-4f0d-9fb1-86d1f7e77121 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.119018] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.119286] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.119481] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleting the datastore file [datastore2] 477b21f4-cac1-48f1-862a-0b283b336d72 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.119752] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b67b351-ad17-4e16-906c-090e18896375 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.127606] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for the task: (returnval){ [ 1152.127606] env[62503]: value = "task-1388296" [ 1152.127606] env[62503]: _type = "Task" [ 1152.127606] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.135121] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388296, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.638270] env[62503]: DEBUG oslo_vmware.api [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Task: {'id': task-1388296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129523} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.638534] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.638728] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.638905] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.639102] env[62503]: INFO nova.compute.manager [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1152.639347] env[62503]: DEBUG oslo.service.loopingcall [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.639548] env[62503]: DEBUG nova.compute.manager [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1152.639644] env[62503]: DEBUG nova.network.neutron [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1153.041993] env[62503]: DEBUG nova.compute.manager [req-fd0e5183-1814-4c45-970a-d60e638d641b req-b2441605-8c59-4d16-801f-dd5f437d45f6 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Received event network-vif-deleted-5a42bd1b-f70d-4aba-8070-8636cad420fc {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1153.041993] env[62503]: INFO nova.compute.manager [req-fd0e5183-1814-4c45-970a-d60e638d641b req-b2441605-8c59-4d16-801f-dd5f437d45f6 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Neutron deleted interface 5a42bd1b-f70d-4aba-8070-8636cad420fc; detaching it from the instance and deleting it from the info cache [ 1153.042280] env[62503]: DEBUG nova.network.neutron [req-fd0e5183-1814-4c45-970a-d60e638d641b req-b2441605-8c59-4d16-801f-dd5f437d45f6 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.521474] env[62503]: DEBUG nova.network.neutron [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.545875] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89783011-913b-4fa4-91c0-8fe611698137 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.557765] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19cc9b7-dfa6-4071-8f9c-cde6d78b9930 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.588689] env[62503]: DEBUG nova.compute.manager [req-fd0e5183-1814-4c45-970a-d60e638d641b req-b2441605-8c59-4d16-801f-dd5f437d45f6 service nova] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Detach interface failed, port_id=5a42bd1b-f70d-4aba-8070-8636cad420fc, reason: Instance 477b21f4-cac1-48f1-862a-0b283b336d72 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1154.024641] env[62503]: INFO nova.compute.manager [-] [instance: 477b21f4-cac1-48f1-862a-0b283b336d72] Took 1.38 seconds to deallocate network for instance. 
[ 1154.531493] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.531777] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.532010] env[62503]: DEBUG nova.objects.instance [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lazy-loading 'resources' on Instance uuid 477b21f4-cac1-48f1-862a-0b283b336d72 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.122077] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07d35cc-9a98-47e3-9257-a71fe69b28b0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.129254] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f955218b-d17b-4a9a-94db-f3a6e725d763 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.158453] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8011254-2d22-420b-b409-76e69d6eecb9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.165450] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacbbc49-d371-4bbf-aa4c-dfdc69eca642 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.179341] env[62503]: DEBUG nova.compute.provider_tree [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.683181] env[62503]: DEBUG nova.scheduler.client.report [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1156.188955] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 
tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.208926] env[62503]: INFO nova.scheduler.client.report [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Deleted allocations for instance 477b21f4-cac1-48f1-862a-0b283b336d72 [ 1156.718595] env[62503]: DEBUG oslo_concurrency.lockutils [None req-31e5f348-5a8e-492a-a689-473922972757 tempest-AttachVolumeNegativeTest-1950166676 tempest-AttachVolumeNegativeTest-1950166676-project-member] Lock "477b21f4-cac1-48f1-862a-0b283b336d72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.205s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.439962] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.440365] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.440539] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.440618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.440806] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.443134] env[62503]: INFO nova.compute.manager [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Terminating instance [ 1165.444968] env[62503]: DEBUG nova.compute.manager [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1165.445200] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.445435] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-967d9125-58aa-4437-a5f4-084968ab7bae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.453152] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1165.453152] env[62503]: value = "task-1388300" [ 1165.453152] env[62503]: _type = "Task" [ 1165.453152] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.462821] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.964216] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388300, 'name': PowerOffVM_Task, 'duration_secs': 0.193296} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.964216] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.964216] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Volume detach. 
Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1165.964216] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294655', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'name': 'volume-70581b1b-a294-4141-b6cc-480701c30979', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f', 'attached_at': '2024-10-31T11:37:53.000000', 'detached_at': '', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'serial': '70581b1b-a294-4141-b6cc-480701c30979'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1165.964960] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9336da-33d5-4a2f-a375-1fc2480b5b21 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.983041] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fac68d5-07aa-42f2-8599-0b0d0739672a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.989271] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3592f9c4-0b55-410e-a3f0-63e21eccfe3c {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.007754] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760719a7-ce27-4359-85c8-3d8f22b96643 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.021697] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] The volume has not been displaced from its original location: [datastore2] volume-70581b1b-a294-4141-b6cc-480701c30979/volume-70581b1b-a294-4141-b6cc-480701c30979.vmdk. No consolidation needed. 
{{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1166.026871] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1166.027148] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcb11b29-0c5d-4e93-bfaf-29d0edec5265 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.044510] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1166.044510] env[62503]: value = "task-1388301" [ 1166.044510] env[62503]: _type = "Task" [ 1166.044510] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.052294] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388301, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.555343] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388301, 'name': ReconfigVM_Task, 'duration_secs': 0.162367} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.555343] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1166.560023] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9b50ee0-4799-4231-941c-fe5729067fe4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.574448] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1166.574448] env[62503]: value = "task-1388302" [ 1166.574448] env[62503]: _type = "Task" [ 1166.574448] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.582678] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388302, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.084081] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388302, 'name': ReconfigVM_Task, 'duration_secs': 0.174774} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.084413] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294655', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'name': 'volume-70581b1b-a294-4141-b6cc-480701c30979', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b7c00736-b0c7-4f69-b47b-f904f84a8a2f', 'attached_at': '2024-10-31T11:37:53.000000', 'detached_at': '', 'volume_id': '70581b1b-a294-4141-b6cc-480701c30979', 'serial': '70581b1b-a294-4141-b6cc-480701c30979'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1167.084701] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.085490] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639b5c29-68ed-4d2e-89a9-f3956ff57577 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.091872] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.092111] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41c2b6e2-3600-446d-a162-f8ecf1c425e6 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.161352] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.161632] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.161826] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 
tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore2] b7c00736-b0c7-4f69-b47b-f904f84a8a2f {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.162117] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60b298a6-eae3-4dd1-871c-d03179c51ee1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.169029] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1167.169029] env[62503]: value = "task-1388304" [ 1167.169029] env[62503]: _type = "Task" [ 1167.169029] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.176384] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.679127] env[62503]: DEBUG oslo_vmware.api [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086894} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.679422] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.679584] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1167.679761] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1167.679940] env[62503]: INFO nova.compute.manager [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1167.680202] env[62503]: DEBUG oslo.service.loopingcall [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.680398] env[62503]: DEBUG nova.compute.manager [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1167.680496] env[62503]: DEBUG nova.network.neutron [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.166791] env[62503]: DEBUG nova.compute.manager [req-88427f08-74b2-4d24-845a-234d9491aa1c req-ec527f68-4c51-4b8f-a6fa-87f3b4baa6f1 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Received event network-vif-deleted-05ae8546-f74a-4c0e-8dcf-be609104f0ba {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1168.166997] env[62503]: INFO nova.compute.manager [req-88427f08-74b2-4d24-845a-234d9491aa1c req-ec527f68-4c51-4b8f-a6fa-87f3b4baa6f1 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Neutron deleted interface 05ae8546-f74a-4c0e-8dcf-be609104f0ba; detaching it from the instance and deleting it from the info cache [ 1168.167198] env[62503]: DEBUG nova.network.neutron [req-88427f08-74b2-4d24-845a-234d9491aa1c req-ec527f68-4c51-4b8f-a6fa-87f3b4baa6f1 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.636244] env[62503]: DEBUG nova.network.neutron [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.670297] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efa82d2a-5c00-453e-a1e8-04703c69805d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.680698] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd2dc58-6445-4272-8595-e076d8ff0052 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.706678] env[62503]: DEBUG nova.compute.manager [req-88427f08-74b2-4d24-845a-234d9491aa1c req-ec527f68-4c51-4b8f-a6fa-87f3b4baa6f1 service nova] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Detach interface failed, port_id=05ae8546-f74a-4c0e-8dcf-be609104f0ba, reason: Instance b7c00736-b0c7-4f69-b47b-f904f84a8a2f could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1169.139502] env[62503]: INFO nova.compute.manager [-] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Took 1.46 seconds to deallocate network for instance. [ 1169.684318] env[62503]: INFO nova.compute.manager [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1169.686312] env[62503]: DEBUG nova.compute.manager [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b7c00736-b0c7-4f69-b47b-f904f84a8a2f] Deleting volume: 70581b1b-a294-4141-b6cc-480701c30979 {{(pid=62503) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3253}} [ 1170.224566] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.224837] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.225061] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.248854] env[62503]: INFO nova.scheduler.client.report [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocations for instance b7c00736-b0c7-4f69-b47b-f904f84a8a2f [ 1170.755454] env[62503]: DEBUG oslo_concurrency.lockutils [None req-a76f4b22-93f7-479e-8212-f375e8a99584 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b7c00736-b0c7-4f69-b47b-f904f84a8a2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.315s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.043634] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "0ae63db4-6856-46d8-afa9-876b17152859" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.043857] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.044092] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock 
"0ae63db4-6856-46d8-afa9-876b17152859-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.044290] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.044503] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.046540] env[62503]: INFO nova.compute.manager [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Terminating instance [ 1171.048218] env[62503]: DEBUG nova.compute.manager [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1171.048418] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.049271] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750dcebe-a5ab-4cd8-9e94-5cc0f6b683ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.057017] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.057258] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3519131-4c2f-4464-9cce-fd6e2f9e896e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.063623] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1171.063623] env[62503]: value = "task-1388306" [ 1171.063623] env[62503]: _type = "Task" [ 1171.063623] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.070739] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.573393] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388306, 'name': PowerOffVM_Task, 'duration_secs': 0.196186} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.573619] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.573798] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.574061] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a578bca7-ac9e-468d-a12c-92261ab2ea29 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.635743] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.635958] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.636164] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore2] 0ae63db4-6856-46d8-afa9-876b17152859 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.636435] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f65ae62f-24b3-4a55-81d9-f1c2586796c2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.642478] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1171.642478] env[62503]: value = "task-1388308" [ 1171.642478] env[62503]: _type = "Task" [ 
1171.642478] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.649623] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.151973] env[62503]: DEBUG oslo_vmware.api [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.38343} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.152284] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.152477] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.152658] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.152839] env[62503]: INFO nova.compute.manager [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1172.153096] env[62503]: DEBUG oslo.service.loopingcall [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1172.153324] env[62503]: DEBUG nova.compute.manager [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1172.153419] env[62503]: DEBUG nova.network.neutron [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.459593] env[62503]: DEBUG nova.compute.manager [req-8540eb8a-54b2-47c5-9f11-defcdd5f95c1 req-71ebebce-1352-413e-aca7-f2b1fc79b178 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Received event network-vif-deleted-95590754-6e6e-4929-87d5-f7ac675aaf17 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1172.459754] env[62503]: INFO nova.compute.manager [req-8540eb8a-54b2-47c5-9f11-defcdd5f95c1 req-71ebebce-1352-413e-aca7-f2b1fc79b178 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Neutron deleted interface 95590754-6e6e-4929-87d5-f7ac675aaf17; detaching it from the instance and deleting it from the info cache [ 1172.459937] env[62503]: DEBUG nova.network.neutron [req-8540eb8a-54b2-47c5-9f11-defcdd5f95c1 req-71ebebce-1352-413e-aca7-f2b1fc79b178 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.846220] env[62503]: DEBUG nova.network.neutron [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.962652] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea62a5eb-0738-4cd2-bd08-95d7b65afda1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.972257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56767ce-6605-41ba-acdf-f421d6320b11 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.996661] env[62503]: DEBUG nova.compute.manager [req-8540eb8a-54b2-47c5-9f11-defcdd5f95c1 req-71ebebce-1352-413e-aca7-f2b1fc79b178 service nova] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Detach interface failed, port_id=95590754-6e6e-4929-87d5-f7ac675aaf17, reason: Instance 0ae63db4-6856-46d8-afa9-876b17152859 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1173.349400] env[62503]: INFO nova.compute.manager [-] [instance: 0ae63db4-6856-46d8-afa9-876b17152859] Took 1.20 seconds to deallocate network for instance. 
[ 1173.855541] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.855904] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.856175] env[62503]: DEBUG nova.objects.instance [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'resources' on Instance uuid 0ae63db4-6856-46d8-afa9-876b17152859 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.376724] env[62503]: DEBUG nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1174.391257] env[62503]: DEBUG nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1174.391459] env[62503]: DEBUG nova.compute.provider_tree [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1174.402795] env[62503]: DEBUG nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1174.420082] env[62503]: DEBUG 
nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1174.480544] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a58baf-04ae-4c4a-9e00-615bfc16fc7e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.488283] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ad63f7-9d0d-43e2-b815-42eec7f2ebde {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.517014] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad918a53-b3a4-438e-bfb1-8b9be766be53 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.523902] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c116e75-e2c5-4dc4-86ff-ff4329697f18 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.537350] env[62503]: DEBUG nova.compute.provider_tree [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.040236] env[62503]: DEBUG nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1175.545387] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.563719] env[62503]: INFO nova.scheduler.client.report [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocations for instance 0ae63db4-6856-46d8-afa9-876b17152859 [ 1176.071796] env[62503]: DEBUG oslo_concurrency.lockutils [None req-0e2bc85f-8d1c-472c-9661-03911b3c30ea 
tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "0ae63db4-6856-46d8-afa9-876b17152859" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.028s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.553390] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.553728] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.553783] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 1178.092635] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.092798] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquired lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.092951] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Forcefully refreshing network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1179.294893] env[62503]: DEBUG nova.network.neutron [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [{"id": "e0d3c642-f374-431c-95ca-9211403e44c6", "address": "fa:16:3e:cb:55:28", "network": {"id": "62a43567-d25a-406f-a4c8-68e056f67595", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-156718245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b86eee9480274a9196fc8ccd920671f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d3c642-f3", "ovs_interfaceid": "e0d3c642-f374-431c-95ca-9211403e44c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62503) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1179.665451] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.665756] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.665990] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.666207] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.666384] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.668529] env[62503]: INFO nova.compute.manager [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Terminating instance [ 1179.670578] env[62503]: DEBUG nova.compute.manager [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Start destroying the instance on the hypervisor. 
{{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1179.670777] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1179.671638] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b838c1-2a0b-4f13-9320-887ab2c00539 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.679377] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.679595] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e337b9ce-bc72-4a7f-abfa-9cd20d45de34 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.686314] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1179.686314] env[62503]: value = "task-1388310" [ 1179.686314] env[62503]: _type = "Task" [ 1179.686314] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.694260] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388310, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.797764] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Releasing lock "refresh_cache-987b6101-565e-4eb2-b8af-f9afd5be38ce" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.798035] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updated the network info_cache for instance {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10295}} [ 1179.798263] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.798424] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.798573] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.798727] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.798870] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.799026] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.799161] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 1179.799323] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.196622] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388310, 'name': PowerOffVM_Task, 'duration_secs': 0.222595} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.197011] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.197203] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1180.197458] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4383fe7-c5d6-438c-9d73-ccdc107d5005 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.261766] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1180.261985] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1180.262201] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleting the datastore file [datastore1] b6fddb0d-70f5-433f-a0ef-0d6bffb35579 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.262485] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8238786-7002-4cb6-83ba-eeae5a491493 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.268691] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for the task: (returnval){ [ 1180.268691] env[62503]: value = "task-1388312" [ 1180.268691] env[62503]: _type = "Task" [ 1180.268691] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.276421] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.302618] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.302909] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.303126] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.303308] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1180.304298] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315d213a-fc82-4c78-a30d-4d1764a7cd33 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.312427] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4440bb74-a17e-4ddc-bb28-2df3b10b30de {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.327612] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2894903-3eed-4b60-9f36-8cbc8b7da646 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.334104] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d897f10-5dc9-4f3e-add8-e2d9f7cf7fb5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.362861] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180088MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1180.363071] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.363319] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.779572] env[62503]: DEBUG oslo_vmware.api [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Task: {'id': task-1388312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138582} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.779825] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.780021] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1180.780200] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1180.780375] env[62503]: INFO nova.compute.manager [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1180.780620] env[62503]: DEBUG oslo.service.loopingcall [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.780814] env[62503]: DEBUG nova.compute.manager [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1180.780914] env[62503]: DEBUG nova.network.neutron [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1181.028979] env[62503]: DEBUG nova.compute.manager [req-d832ab7b-f27e-4722-ac0e-afaae8dac0a5 req-b27da7ee-275e-4d35-ba19-681826328212 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Received event network-vif-deleted-24ac3187-6729-47ea-beb6-4c96018b8a05 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1181.029201] env[62503]: INFO nova.compute.manager [req-d832ab7b-f27e-4722-ac0e-afaae8dac0a5 req-b27da7ee-275e-4d35-ba19-681826328212 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Neutron deleted interface 24ac3187-6729-47ea-beb6-4c96018b8a05; detaching it from the instance and deleting it from the info cache [ 1181.029338] env[62503]: DEBUG nova.network.neutron [req-d832ab7b-f27e-4722-ac0e-afaae8dac0a5 req-b27da7ee-275e-4d35-ba19-681826328212 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.175270] env[62503]: DEBUG oslo_concurrency.lockutils [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.175538] env[62503]: DEBUG oslo_concurrency.lockutils [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.390716] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1181.390959] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 987b6101-565e-4eb2-b8af-f9afd5be38ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1181.390991] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 485d3aba-6c0d-46c7-860b-c0dbd9c16498 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1181.391119] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1181.391323] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1181.391485] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1181.445452] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741562a5-5665-4985-a2c4-4ee9dd2c35eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.453547] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262b08fb-56d3-40cb-92a6-de89ec29798b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.481912] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83caca5d-0460-45c4-bb24-e9fc74c85ab9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.488660] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0f78e6-6284-4440-acbd-04d376752a3b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.502413] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.505169] env[62503]: DEBUG nova.network.neutron [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.532048] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-008bfb97-95bc-4949-b2f0-8bab38b41715 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.540057] env[62503]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f785e58-e2e9-487c-8485-784498faf75e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.563047] env[62503]: DEBUG nova.compute.manager [req-d832ab7b-f27e-4722-ac0e-afaae8dac0a5 req-b27da7ee-275e-4d35-ba19-681826328212 service nova] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Detach interface failed, port_id=24ac3187-6729-47ea-beb6-4c96018b8a05, reason: Instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1181.678945] env[62503]: INFO nova.compute.manager [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Detaching volume 0596d168-bfef-4df2-ac6e-ab24dec409e8 [ 1181.707956] env[62503]: INFO nova.virt.block_device [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Attempting to driver detach volume 0596d168-bfef-4df2-ac6e-ab24dec409e8 from mountpoint /dev/sdb [ 1181.708238] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Volume detach. Driver type: vmdk {{(pid=62503) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1181.708435] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294659', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'name': 'volume-0596d168-bfef-4df2-ac6e-ab24dec409e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94e69c2d-bf7a-42a8-a063-62ad1bb7f927', 'attached_at': '', 'detached_at': '', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'serial': '0596d168-bfef-4df2-ac6e-ab24dec409e8'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1181.709318] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebc106b-e187-46e1-a964-4dcc94784a9e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.731042] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceeb096-68b7-41b8-90ec-d26e3e313c4f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.737803] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ddaf1e-2122-4ed0-8084-85c6a4eb38d4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.760059] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-71fc05d9-0e59-4b41-904a-bc7a5c39cf3a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.773472] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] The volume has not been displaced from its original location: [datastore2] volume-0596d168-bfef-4df2-ac6e-ab24dec409e8/volume-0596d168-bfef-4df2-ac6e-ab24dec409e8.vmdk. No consolidation needed. {{(pid=62503) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1181.778564] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1181.779123] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08d0d525-0b96-4d28-b07d-06368a3997c9 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.795684] env[62503]: DEBUG oslo_vmware.api [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1181.795684] env[62503]: value = "task-1388313" [ 1181.795684] env[62503]: _type = "Task" [ 1181.795684] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.802629] env[62503]: DEBUG oslo_vmware.api [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.005954] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1182.009128] env[62503]: INFO nova.compute.manager [-] [instance: b6fddb0d-70f5-433f-a0ef-0d6bffb35579] Took 1.23 seconds to deallocate network for instance. [ 1182.306434] env[62503]: DEBUG oslo_vmware.api [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388313, 'name': ReconfigVM_Task, 'duration_secs': 0.232975} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.306717] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=62503) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1182.311294] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30373610-14cb-4a2a-ab86-a8d06415f0bd {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.325721] env[62503]: DEBUG oslo_vmware.api [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1182.325721] env[62503]: value = "task-1388314" [ 1182.325721] env[62503]: _type = "Task" [ 1182.325721] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.332846] env[62503]: DEBUG oslo_vmware.api [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.510890] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1182.511179] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.514446] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.514710] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.514924] env[62503]: DEBUG nova.objects.instance [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lazy-loading 'resources' on Instance uuid b6fddb0d-70f5-433f-a0ef-0d6bffb35579 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.835745] env[62503]: DEBUG oslo_vmware.api [None 
req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388314, 'name': ReconfigVM_Task, 'duration_secs': 0.130192} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.836091] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294659', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'name': 'volume-0596d168-bfef-4df2-ac6e-ab24dec409e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94e69c2d-bf7a-42a8-a063-62ad1bb7f927', 'attached_at': '', 'detached_at': '', 'volume_id': '0596d168-bfef-4df2-ac6e-ab24dec409e8', 'serial': '0596d168-bfef-4df2-ac6e-ab24dec409e8'} {{(pid=62503) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1183.070040] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a4e300-ac86-4bc5-ab03-bdb2383d4229 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.077555] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc66b90f-716f-45b4-9554-e89864bae4ca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.106146] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0a2fd8-f4f3-4ed0-abd5-fa157475c4dc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.112793] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706a3c81-d928-4a9d-b1b9-be8748acc7ae {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.125323] env[62503]: DEBUG nova.compute.provider_tree [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.377514] env[62503]: DEBUG nova.objects.instance [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'flavor' on Instance uuid 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.628379] env[62503]: DEBUG nova.scheduler.client.report [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1184.132894] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.156318] env[62503]: INFO nova.scheduler.client.report [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Deleted allocations for instance b6fddb0d-70f5-433f-a0ef-0d6bffb35579 [ 1184.384500] env[62503]: DEBUG oslo_concurrency.lockutils [None req-05073f3c-9c54-4098-a02f-3508309dd427 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.664509] env[62503]: DEBUG oslo_concurrency.lockutils [None req-56956955-a8ef-4173-ae94-6038ddd29599 tempest-ServerActionsTestOtherA-86245922 tempest-ServerActionsTestOtherA-86245922-project-member] Lock "b6fddb0d-70f5-433f-a0ef-0d6bffb35579" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.999s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.500368] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.500611] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.500840] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.501050] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock 
"94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.501233] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.503879] env[62503]: INFO nova.compute.manager [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Terminating instance [ 1185.504987] env[62503]: DEBUG nova.compute.manager [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1185.505164] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.505994] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71867f97-edea-4330-820b-69d571c2dd2b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.514051] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.514287] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90735de6-fcde-4476-b710-070c11ac9e27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.520073] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1185.520073] env[62503]: value = "task-1388315" [ 1185.520073] env[62503]: _type = "Task" [ 1185.520073] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.528623] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388315, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.030515] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388315, 'name': PowerOffVM_Task, 'duration_secs': 0.186559} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.030805] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.030982] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1186.031262] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37b33370-1439-41fb-9f37-543359d5f060 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.097520] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.097647] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Deleting contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.097761] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleting the datastore file [datastore2] 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.098139] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c306ee6d-8cd9-4ed4-bce2-28fc71f66008 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.105623] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1186.105623] env[62503]: value = "task-1388317" [ 1186.105623] env[62503]: _type = "Task" [ 1186.105623] env[62503]: } to complete. 
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.113360] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.615970] env[62503]: DEBUG oslo_vmware.api [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158217} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.616241] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.616499] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Deleted contents of the VM from datastore datastore2 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.616705] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.616887] env[62503]: INFO nova.compute.manager [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1186.617146] env[62503]: DEBUG oslo.service.loopingcall [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.617345] env[62503]: DEBUG nova.compute.manager [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1186.617441] env[62503]: DEBUG nova.network.neutron [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1187.033248] env[62503]: DEBUG nova.compute.manager [req-889db67d-4c27-4799-a01a-95fe77303724 req-e6d33ec3-2437-48b5-9cbd-9903dda32380 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Received event network-vif-deleted-0a00591c-6583-407d-bf50-60a53719508b {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1187.033653] env[62503]: INFO nova.compute.manager [req-889db67d-4c27-4799-a01a-95fe77303724 req-e6d33ec3-2437-48b5-9cbd-9903dda32380 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Neutron deleted interface 0a00591c-6583-407d-bf50-60a53719508b; detaching it from the instance and deleting it from the info cache [ 1187.033930] env[62503]: DEBUG nova.network.neutron [req-889db67d-4c27-4799-a01a-95fe77303724 req-e6d33ec3-2437-48b5-9cbd-9903dda32380 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.506294] env[62503]: DEBUG nova.network.neutron [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.537356] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-900bd209-f43c-4814-8bbd-507f01e704a3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.547749] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220cbd2c-44e6-4352-b1db-d5946b744913 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.573195] env[62503]: DEBUG nova.compute.manager [req-889db67d-4c27-4799-a01a-95fe77303724 req-e6d33ec3-2437-48b5-9cbd-9903dda32380 service nova] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Detach interface failed, port_id=0a00591c-6583-407d-bf50-60a53719508b, reason: Instance 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1188.009197] env[62503]: INFO nova.compute.manager [-] [instance: 94e69c2d-bf7a-42a8-a063-62ad1bb7f927] Took 1.39 seconds to deallocate network for instance. 
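The "Waiting for the task ... progress is N% ... completed successfully {'duration_secs': ...}" entries above come from a poll-until-done loop around vCenter tasks. Below is a minimal, self-contained sketch of that polling pattern; the FakeTask class and poll_task helper are hypothetical stand-ins for illustration only, not the oslo.vmware implementation that produced these log lines.

import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle (illustration only)."""
    def __init__(self, name, total_polls=3):
        self.name = name
        self._polls = 0
        self._total = total_polls
        self._started = time.monotonic()

    def progress(self):
        # Each poll advances the fake task toward completion.
        self._polls += 1
        return min(100, int(100 * self._polls / self._total))

    @property
    def duration_secs(self):
        return time.monotonic() - self._started

def poll_task(task, interval=0.5):
    # Log progress on each poll and return once the task reports 100%,
    # mirroring the "progress is N%" / "completed successfully" entries above.
    while True:
        pct = task.progress()
        if pct >= 100:
            print(f"Task: {task.name} completed successfully "
                  f"(duration_secs={task.duration_secs:.3f})")
            return
        print(f"Task: {task.name} progress is {pct}%.")
        time.sleep(interval)

if __name__ == "__main__":
    poll_task(FakeTask("ReconfigVM_Task"))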
[ 1188.516137] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.516425] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.516652] env[62503]: DEBUG nova.objects.instance [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'resources' on Instance uuid 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.069401] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c052d49-8e44-4ac2-94d5-5260fcae5015 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.077130] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96ab840-59b8-45c5-b7ea-1e9ba482cdf8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.106867] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f313429-6802-4588-b050-4fa26ad69ea4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.113526] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0597b21-b5cf-42d8-a371-d6a2bf0f3020 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.126156] env[62503]: DEBUG nova.compute.provider_tree [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.628906] env[62503]: DEBUG nova.scheduler.client.report [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1190.134137] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.151824] env[62503]: INFO nova.scheduler.client.report [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted allocations for instance 94e69c2d-bf7a-42a8-a063-62ad1bb7f927 [ 1190.659070] env[62503]: DEBUG oslo_concurrency.lockutils [None req-69e438cd-dfb3-46e0-85a7-c771055f53f2 tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "94e69c2d-bf7a-42a8-a063-62ad1bb7f927" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.158s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.808520] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.808893] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.809022] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.809233] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.809423] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.811743] env[62503]: INFO 
nova.compute.manager [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Terminating instance [ 1195.813681] env[62503]: DEBUG nova.compute.manager [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1195.813923] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1195.814961] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36fd90a-5b31-47fe-90c0-a58754767044 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.823397] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.823656] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b28f1a7-893d-44fc-8411-a731d851c122 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.829847] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1195.829847] env[62503]: value = "task-1388319" [ 1195.829847] env[62503]: _type = "Task" [ 1195.829847] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.837753] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388319, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.339456] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388319, 'name': PowerOffVM_Task, 'duration_secs': 0.222083} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.339735] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.339923] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.340203] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b268c52-7bd2-4f31-a929-fce6ee7db29a {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.424698] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.424929] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.425133] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleting the datastore file [datastore1] 485d3aba-6c0d-46c7-860b-c0dbd9c16498 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.425390] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a872f39-afcb-4fc7-8dd6-d52adf90d591 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.432317] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1196.432317] env[62503]: value = "task-1388321" [ 1196.432317] env[62503]: _type = "Task" [ 1196.432317] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.439420] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.943614] env[62503]: DEBUG oslo_vmware.api [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174044} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.944034] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.944034] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.944216] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.944397] env[62503]: INFO nova.compute.manager [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1196.944664] env[62503]: DEBUG oslo.service.loopingcall [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.944900] env[62503]: DEBUG nova.compute.manager [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1196.944999] env[62503]: DEBUG nova.network.neutron [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.175509] env[62503]: DEBUG nova.compute.manager [req-4179b37f-e3c2-40b0-89b7-0bcb6e0a0a6b req-61ca1011-9357-43d7-a7f4-0309d851044b service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Received event network-vif-deleted-b0401560-7408-4d52-a32c-906d5934c94e {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1197.175724] env[62503]: INFO nova.compute.manager [req-4179b37f-e3c2-40b0-89b7-0bcb6e0a0a6b req-61ca1011-9357-43d7-a7f4-0309d851044b service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Neutron deleted interface b0401560-7408-4d52-a32c-906d5934c94e; detaching it from the instance and deleting it from the info cache [ 1197.175928] env[62503]: DEBUG nova.network.neutron [req-4179b37f-e3c2-40b0-89b7-0bcb6e0a0a6b req-61ca1011-9357-43d7-a7f4-0309d851044b service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.655854] env[62503]: DEBUG nova.network.neutron [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.678291] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11b08bfb-24e8-4804-9339-fbe1da237d63 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.688227] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ee523f-94da-4ef2-9307-f53eed193596 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.713191] env[62503]: DEBUG nova.compute.manager [req-4179b37f-e3c2-40b0-89b7-0bcb6e0a0a6b req-61ca1011-9357-43d7-a7f4-0309d851044b service nova] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Detach interface failed, port_id=b0401560-7408-4d52-a32c-906d5934c94e, reason: Instance 485d3aba-6c0d-46c7-860b-c0dbd9c16498 could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1198.158529] env[62503]: INFO nova.compute.manager [-] [instance: 485d3aba-6c0d-46c7-860b-c0dbd9c16498] Took 1.21 seconds to deallocate network for instance. 
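The recurring lock bookkeeping in these entries ('Acquiring lock "compute_resources" by ...' / 'acquired ... :: waited 0.001s' / '"released" ... :: held 1.612s') reflects named locks whose wait and hold times are measured around the critical section. The sketch below is a hypothetical timed_lock helper written only to illustrate that accounting shape; it is not oslo.concurrency's lockutils.

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

def _get_lock(name):
    # One lock object per name, so callers contending on the same name serialize.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())

@contextmanager
def timed_lock(name, owner):
    # Report wait time (acquire latency) and hold time (critical-section length)
    # in the same shape as the "acquired :: waited" / "released :: held" entries.
    lock = _get_lock(name)
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{owner}"')
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.1)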
[ 1198.665890] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.666030] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.666685] env[62503]: DEBUG nova.objects.instance [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'resources' on Instance uuid 485d3aba-6c0d-46c7-860b-c0dbd9c16498 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.210646] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596db9b4-afcc-402a-b3ea-716d8f189f31 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.218226] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc40a1f-775e-47dd-b9f3-b1a0f4b495cf {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.249576] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605fa2b6-a114-4b21-b78a-1279b3b3f00b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.256933] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdc8378-f2f5-4bb6-8f35-c1be5fd2ab8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.269860] env[62503]: DEBUG nova.compute.provider_tree [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.773304] env[62503]: DEBUG nova.scheduler.client.report [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1200.278476] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.298601] env[62503]: INFO nova.scheduler.client.report [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted allocations for instance 485d3aba-6c0d-46c7-860b-c0dbd9c16498 [ 1200.807482] env[62503]: DEBUG oslo_concurrency.lockutils [None req-d1c38bb2-b1a0-4657-b248-1d03f882187d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "485d3aba-6c0d-46c7-860b-c0dbd9c16498" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.999s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.325353] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.325691] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.325822] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.326014] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.326211] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.328388] env[62503]: INFO 
nova.compute.manager [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Terminating instance [ 1201.330291] env[62503]: DEBUG nova.compute.manager [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1201.330291] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.331131] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e399668f-2bdd-4d2f-b679-b9486c1a6882 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.338451] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.338669] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df777473-2487-4965-b483-d69f4c84675b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.344797] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1201.344797] env[62503]: value = "task-1388322" [ 1201.344797] env[62503]: _type = "Task" [ 1201.344797] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.352163] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.855204] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388322, 'name': PowerOffVM_Task, 'duration_secs': 0.200596} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.855474] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.855652] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.855900] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c59e730f-2467-4531-85d6-935b68b555d5 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.919307] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.919556] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.919725] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleting the datastore file [datastore1] 987b6101-565e-4eb2-b8af-f9afd5be38ce {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.919993] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e671c2e-4e74-4b67-af64-27c2254e95b1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.925853] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for the task: (returnval){ [ 1201.925853] env[62503]: value = "task-1388324" [ 1201.925853] env[62503]: _type = "Task" [ 1201.925853] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.933192] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.435657] env[62503]: DEBUG oslo_vmware.api [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Task: {'id': task-1388324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137559} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.436036] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.436153] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.436340] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.436520] env[62503]: INFO nova.compute.manager [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1202.436764] env[62503]: DEBUG oslo.service.loopingcall [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.436961] env[62503]: DEBUG nova.compute.manager [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1202.437068] env[62503]: DEBUG nova.network.neutron [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.657057] env[62503]: DEBUG nova.compute.manager [req-a1d37100-646f-4fa9-8c0d-b10e6f48bb1e req-833bc3a2-ac6e-437c-9635-3a48f25a6b70 service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Received event network-vif-deleted-e0d3c642-f374-431c-95ca-9211403e44c6 {{(pid=62503) external_instance_event /opt/stack/nova/nova/compute/manager.py:11427}} [ 1202.657412] env[62503]: INFO nova.compute.manager [req-a1d37100-646f-4fa9-8c0d-b10e6f48bb1e req-833bc3a2-ac6e-437c-9635-3a48f25a6b70 service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Neutron deleted interface e0d3c642-f374-431c-95ca-9211403e44c6; detaching it from the instance and deleting it from the info cache [ 1202.657601] env[62503]: DEBUG nova.network.neutron [req-a1d37100-646f-4fa9-8c0d-b10e6f48bb1e req-833bc3a2-ac6e-437c-9635-3a48f25a6b70 service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.132030] env[62503]: DEBUG nova.network.neutron [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.159291] env[62503]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-962cc9fe-fbed-4701-abf3-629f4b0fc808 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.169187] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f854551-9aff-4c13-b266-491bb59a5045 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.190526] env[62503]: DEBUG nova.compute.manager [req-a1d37100-646f-4fa9-8c0d-b10e6f48bb1e req-833bc3a2-ac6e-437c-9635-3a48f25a6b70 service nova] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Detach interface failed, port_id=e0d3c642-f374-431c-95ca-9211403e44c6, reason: Instance 987b6101-565e-4eb2-b8af-f9afd5be38ce could not be found. {{(pid=62503) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11261}} [ 1203.636218] env[62503]: INFO nova.compute.manager [-] [instance: 987b6101-565e-4eb2-b8af-f9afd5be38ce] Took 1.20 seconds to deallocate network for instance. 
[ 1204.142626] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.142991] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.143246] env[62503]: DEBUG nova.objects.instance [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lazy-loading 'resources' on Instance uuid 987b6101-565e-4eb2-b8af-f9afd5be38ce {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.680851] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fcf52c-0c77-4075-8540-a9dbf8444a9f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.688182] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f91935-1086-4a94-b9b8-f77269b134f1 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.716436] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b60ef92-2619-4bb6-b41c-6914315aea6d {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.723464] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53740cd7-6e11-4c4c-b133-a97035067cd0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.739948] env[62503]: DEBUG nova.compute.provider_tree [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.244014] env[62503]: DEBUG nova.scheduler.client.report [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1205.749443] env[62503]: DEBUG oslo_concurrency.lockutils [None 
req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.770482] env[62503]: INFO nova.scheduler.client.report [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Deleted allocations for instance 987b6101-565e-4eb2-b8af-f9afd5be38ce [ 1206.280466] env[62503]: DEBUG oslo_concurrency.lockutils [None req-4b626b02-98bf-4ee5-9321-93768d97793d tempest-ServerRescueNegativeTestJSON-1608367079 tempest-ServerRescueNegativeTestJSON-1608367079-project-member] Lock "987b6101-565e-4eb2-b8af-f9afd5be38ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.955s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.786345] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "7f314fba-9395-4ecc-8fe9-3676daa65977" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.786611] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.289082] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Starting instance... 
{{(pid=62503) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1209.808337] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.808622] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.810107] env[62503]: INFO nova.compute.claims [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.845338] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa6f439-ac12-4253-8881-05eb1f4d51a2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.852864] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc48fc2-ece8-4919-a35b-9c671d39199f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.882257] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b395275-816e-4c8d-87d6-d04ad290b568 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.889265] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14aefdb-e2e8-4a93-8a9c-47afa8e03586 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.901984] env[62503]: DEBUG nova.compute.provider_tree [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Inventory has not changed in ProviderTree for provider: 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.405388] env[62503]: DEBUG nova.scheduler.client.report [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Inventory has not changed for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1211.910010] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c 
tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.910624] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Start building networks asynchronously for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2832}} [ 1212.415352] env[62503]: DEBUG nova.compute.utils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Using /dev/sd instead of None {{(pid=62503) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1212.416845] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Not allocating networking since 'none' was specified. {{(pid=62503) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1980}} [ 1212.918018] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Start building block device mappings for instance. {{(pid=62503) _build_resources /opt/stack/nova/nova/compute/manager.py:2867}} [ 1213.927803] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Start spawning the instance on the hypervisor. 
{{(pid=62503) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2641}} [ 1213.953146] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1213.953508] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1213.953701] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.953937] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1213.954111] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.954277] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1213.954497] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1213.954668] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1213.954848] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c 
tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1213.955029] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1213.955221] env[62503]: DEBUG nova.virt.hardware [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.956122] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475eba8d-13b3-44a3-90db-0c593d495f27 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.965568] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96eeb18-c42b-4df4-94b2-54e11b762303 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.978630] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1213.984112] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Creating folder: Project (fa443d7a590e4552bdcb8b1146f1cad1). Parent ref: group-v294540. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1213.984370] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b58f299-64a7-4222-9941-463f1ca60482 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.993857] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Created folder: Project (fa443d7a590e4552bdcb8b1146f1cad1) in parent group-v294540. [ 1213.994055] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Creating folder: Instances. Parent ref: group-v294661. {{(pid=62503) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1213.994266] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3edee684-e26e-4a8c-bede-2f5727a22754 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.002728] env[62503]: INFO nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Created folder: Instances in parent group-v294661. 
[ 1214.002957] env[62503]: DEBUG oslo.service.loopingcall [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1214.003164] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1214.003355] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2ff08ce-101e-475b-adb9-a5d1df216a22 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.018302] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1214.018302] env[62503]: value = "task-1388327" [ 1214.018302] env[62503]: _type = "Task" [ 1214.018302] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.025904] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388327, 'name': CreateVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.528390] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388327, 'name': CreateVM_Task, 'duration_secs': 0.256947} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.528576] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1214.529012] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.529191] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.529520] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1214.529769] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bed6fcc-0fe2-475f-993f-43af03742de8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.534198] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ 
[ 1214.534198] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275b04d-14fd-32c1-3628-14e7203fb875" [ 1214.534198] env[62503]: _type = "Task" [ 1214.534198] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.541408] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275b04d-14fd-32c1-3628-14e7203fb875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.043829] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5275b04d-14fd-32c1-3628-14e7203fb875, 'name': SearchDatastore_Task, 'duration_secs': 0.010593} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.044174] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.044392] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1215.044628] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.044789] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.044968] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1215.045240] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95e96d84-ed56-40b4-b4bb-69f8f888436b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.052884] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1215.053076] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1215.053762] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b80313de-78d1-42a1-ac84-08e87e1dbce3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.058055] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1215.058055] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52719acf-2dc0-b534-bee7-f3487573c1e3" [ 1215.058055] env[62503]: _type = "Task" [ 1215.058055] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.064671] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52719acf-2dc0-b534-bee7-f3487573c1e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.568041] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52719acf-2dc0-b534-bee7-f3487573c1e3, 'name': SearchDatastore_Task, 'duration_secs': 0.008084} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.568790] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d2183ec-f46e-4b80-b84d-d5fe0856bf47 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.573497] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1215.573497] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a90234-f0cc-7281-6e2b-716878730c10" [ 1215.573497] env[62503]: _type = "Task" [ 1215.573497] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.580405] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a90234-f0cc-7281-6e2b-716878730c10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.083502] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52a90234-f0cc-7281-6e2b-716878730c10, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.083849] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.084054] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1216.084322] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5730556c-a753-4549-a85c-86316af13041 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.091099] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1216.091099] env[62503]: value = "task-1388328" [ 1216.091099] env[62503]: _type = "Task" [ 1216.091099] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.097869] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.600901] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443545} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.601176] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1216.601403] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1216.601646] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07e57137-48ce-4a1d-965e-ae3464660c13 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.607768] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1216.607768] env[62503]: value = "task-1388329" [ 1216.607768] env[62503]: _type = "Task" [ 1216.607768] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.616349] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.117247] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058763} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.117668] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1217.118274] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4cbbe3-646f-4939-b66f-55de5b861776 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.120850] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.121028] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.139559] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1217.140364] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88348bff-a431-45dd-a552-93f0619ad768 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.158697] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1217.158697] env[62503]: value = "task-1388330" [ 1217.158697] env[62503]: _type = "Task" [ 1217.158697] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.168883] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388330, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.625535] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.625716] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Starting heal instance info cache {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10224}} [ 1217.667861] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388330, 'name': ReconfigVM_Task, 'duration_secs': 0.266224} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.668149] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.668741] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3b6e72d-6d7b-4211-a440-080dd08813a8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.674937] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1217.674937] env[62503]: value = "task-1388331" [ 1217.674937] env[62503]: _type = "Task" [ 1217.674937] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.682129] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388331, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.184480] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388331, 'name': Rename_Task, 'duration_secs': 0.128732} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.184844] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.185042] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ad88091-29dc-49b1-957a-0458e487a564 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.190980] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1218.190980] env[62503]: value = "task-1388332" [ 1218.190980] env[62503]: _type = "Task" [ 1218.190980] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.197944] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.700990] env[62503]: DEBUG oslo_vmware.api [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388332, 'name': PowerOnVM_Task, 'duration_secs': 0.385377} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.701361] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1218.701667] env[62503]: INFO nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Took 4.77 seconds to spawn the instance on the hypervisor. [ 1218.701944] env[62503]: DEBUG nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1218.702751] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07c5d13-605e-497b-8f3f-c5f8ad9d1808 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.219580] env[62503]: INFO nova.compute.manager [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Took 9.43 seconds to build instance. 
[ 1219.686699] env[62503]: INFO nova.compute.manager [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Rebuilding instance [ 1219.722998] env[62503]: DEBUG oslo_concurrency.lockutils [None req-bd650e88-60b8-4ac5-a968-c1403cbd230c tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.936s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.725244] env[62503]: DEBUG nova.compute.manager [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1219.726092] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd28d63-9467-44f6-9f31-64a13877b69f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.138501] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Didn't find any instances for network info cache update. {{(pid=62503) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10310}} [ 1220.138754] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.138845] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.138996] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.139172] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.139320] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.139469] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.139600] env[62503]: DEBUG nova.compute.manager [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62503) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10843}} [ 1220.139766] env[62503]: DEBUG oslo_service.periodic_task [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62503) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.643093] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.643600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.643600] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.643872] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62503) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1220.644555] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63085b4d-d89e-43b3-8f4d-3bac05638948 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.652541] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d1f652-3dfb-4117-acb9-e7ea8a4fd114 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.665862] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433e7a83-e04e-426b-935b-c9adca4a6918 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.671994] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347fbfe2-c556-491f-8fdb-bdfbc2f5eef4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.700659] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=62503) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1220.700803] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1220.700986] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.738988] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.739351] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceb143d8-beba-48a3-9d13-6a9171639466 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.748555] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1220.748555] env[62503]: value = "task-1388333" [ 1220.748555] env[62503]: _type = "Task" [ 1220.748555] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.759343] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.257331] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388333, 'name': PowerOffVM_Task, 'duration_secs': 0.172535} completed successfully. 
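Annotation: the "compute_resources" lock entries above report how long each caller waited for the lock and, later, how long it was held. The sketch below reproduces that waited/held bookkeeping with a plain threading.Lock; it is an illustration of the pattern, not the oslo.concurrency lockutils implementation.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, owner):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage mirroring the resource tracker entries:
    with timed_lock("compute_resources", "ResourceTracker._update_available_resource"):
        pass  # audit available resources while holding the lock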
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.257668] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.257943] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.258712] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6b0d03-c9b7-42de-8d95-c3e5558d3bf2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.265359] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.265584] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bf078b1-8ec1-4c66-a609-4b5d5161d900 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.288528] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.288746] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.288936] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Deleting the datastore file [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.289203] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e00e552-52c6-4441-89d5-6679aad4be8b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.295154] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1221.295154] env[62503]: value = "task-1388335" [ 1221.295154] env[62503]: _type = "Task" [ 1221.295154] env[62503]: } to complete. 
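Annotation: the rebuild's teardown above proceeds power off, unregister, then delete the instance directory from the datastore. The sketch below captures only that ordering; the Hypervisor protocol and its methods are hypothetical, with each step mapped to the vCenter task named in the log.

    from typing import Protocol

    class Hypervisor(Protocol):
        def power_off(self, vm_ref: str) -> None: ...
        def unregister(self, vm_ref: str) -> None: ...
        def delete_datastore_dir(self, datastore: str, path: str) -> None: ...

    def destroy_instance(hv: Hypervisor, vm_ref: str, datastore: str, uuid: str) -> None:
        hv.power_off(vm_ref)                      # PowerOffVM_Task
        hv.unregister(vm_ref)                     # UnregisterVM
        hv.delete_datastore_dir(datastore, uuid)  # DeleteDatastoreFile_Task on [datastore1] <uuid>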
{{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.302860] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.722023] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Instance 7f314fba-9395-4ecc-8fe9-3676daa65977 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62503) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1221.722302] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1221.722381] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62503) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1221.748301] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecccc2d3-640a-44b9-97f3-ba38e31cbbc7 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.755742] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63a0624-986a-4e75-87df-2bba9df41433 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.784168] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8558c1-5e5f-41a0-99f3-6eb6929b6b6e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.790884] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4c6a4c-5ba6-49cc-a838-0c1a591dbfc4 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.805516] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1221.811016] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090689} 
completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.811252] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.811440] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.811616] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.325852] env[62503]: ERROR nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] [req-29b8bca9-b403-4d5d-a522-eabb16ba1423] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-29b8bca9-b403-4d5d-a522-eabb16ba1423"}]} [ 1222.341288] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Refreshing inventories for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1222.355040] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating ProviderTree inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1222.355040] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.366481] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Refreshing aggregate associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, aggregates: None {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1222.384880] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Refreshing trait associations for resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62503) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1222.408328] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fcae55-30d4-460c-a9a9-7124676499b2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.416347] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3704ed-bb7d-426e-b3e0-b671ce81b116 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.446118] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f31ed69-5027-4092-869c-853e7cc56407 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.452870] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca63709-b026-4bb9-b97a-726e57c6981b {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.466528] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.844796] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:26:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:26:03Z,direct_url=,disk_format='vmdk',id=8150ca02-f879-471d-8913-459408f127a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26d9ee75d25b4018b8bb1fb11c8bd98c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:26:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1222.845143] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Flavor limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1222.845320] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Image limits 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1222.845512] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Flavor pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1222.845665] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Image pref 0:0:0 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1222.845817] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62503) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1222.846041] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1222.846211] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1222.846383] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Got 1 possible topologies {{(pid=62503) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1222.846548] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1222.846723] env[62503]: DEBUG nova.virt.hardware [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 
tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62503) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1222.847588] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a480cd-931e-482e-9734-94fa2919818f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.854750] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ab5156-2522-45de-841d-68c27fb06d83 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.867247] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance VIF info [] {{(pid=62503) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1222.872655] env[62503]: DEBUG oslo.service.loopingcall [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.872846] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Creating VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1222.873064] env[62503]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2fb8a3d-7bca-4416-a52e-699b2db93b52 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.889239] env[62503]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1222.889239] env[62503]: value = "task-1388336" [ 1222.889239] env[62503]: _type = "Task" [ 1222.889239] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.896182] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388336, 'name': CreateVM_Task} progress is 0%. 
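Annotation: the nova.virt.hardware entries above enumerate possible CPU topologies for the 1-vCPU flavor within the (effectively unconstrained) socket/core/thread limits and end up with a single candidate, (1,1,1). A small sketch of that enumeration under those assumptions; it is not the Nova algorithm, just the idea of splitting the vCPU count within limits:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets > max_sockets or cores > max_cores or threads > max_threads:
                continue
            topologies.append((sockets, cores, threads))
        return topologies

    # For the 1-vCPU flavor above this yields a single candidate: (1, 1, 1).
    print(possible_topologies(1))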
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.996017] env[62503]: DEBUG nova.scheduler.client.report [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1222.996296] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 133 to 134 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1222.996452] env[62503]: DEBUG nova.compute.provider_tree [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.399020] env[62503]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388336, 'name': CreateVM_Task, 'duration_secs': 0.244553} completed successfully. 
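Annotation: earlier in this sequence the inventory update hit a 409 "placement.concurrent_update" generation conflict, the report client refreshed its view, and the retried update then succeeded, bumping the provider generation from 133 to 134. The sketch below shows that refresh-and-retry shape; put_inventory() and get_generation() are hypothetical callables, not the real placement client.

    def set_inventory_with_retry(put_inventory, get_generation, provider_uuid,
                                 inventory, retries=3):
        generation = get_generation(provider_uuid)
        for _ in range(retries):
            status = put_inventory(provider_uuid, inventory, generation)
            if status != 409:
                # Placement bumps the provider generation on a successful
                # update (133 -> 134 in the entries above).
                return generation + 1
            # Someone else updated the provider first; refresh and try again.
            generation = get_generation(provider_uuid)
        raise RuntimeError(f"could not update inventory for {provider_uuid}")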
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.399224] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Created VM on the ESX host {{(pid=62503) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1223.399643] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.399812] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.400167] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1223.400421] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59b897c1-8508-4d1c-a117-6c387e1b6c3f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.404659] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1223.404659] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5248e089-9198-44d5-e6e1-7f2ea48710f5" [ 1223.404659] env[62503]: _type = "Task" [ 1223.404659] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.411753] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5248e089-9198-44d5-e6e1-7f2ea48710f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.501695] env[62503]: DEBUG nova.compute.resource_tracker [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62503) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1223.501940] env[62503]: DEBUG oslo_concurrency.lockutils [None req-66fdc920-a83c-4ab1-8113-2824ff034cb2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.801s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.915083] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5248e089-9198-44d5-e6e1-7f2ea48710f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.915424] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.915612] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Processing image 8150ca02-f879-471d-8913-459408f127a1 {{(pid=62503) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1223.915841] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.915994] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.916197] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.916443] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce7dbb31-810a-4d7e-a751-4c7d2838a42e {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.924195] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62503) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.924365] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62503) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1223.925069] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e76d8c7d-34b2-443d-b828-f6a513980bfc {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.929676] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1223.929676] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206a4ac-4f3f-9790-35aa-4ac2ad96b24f" [ 1223.929676] env[62503]: _type = "Task" [ 1223.929676] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.936790] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206a4ac-4f3f-9790-35aa-4ac2ad96b24f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.440542] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]5206a4ac-4f3f-9790-35aa-4ac2ad96b24f, 'name': SearchDatastore_Task, 'duration_secs': 0.00978} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.441317] env[62503]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3060fb22-e783-47b2-bd57-b2672c07d956 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.445972] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1224.445972] env[62503]: value = "session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d37510-0998-9981-c30d-53bb5986ab43" [ 1224.445972] env[62503]: _type = "Task" [ 1224.445972] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.453042] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d37510-0998-9981-c30d-53bb5986ab43, 'name': SearchDatastore_Task} progress is 0%. 
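Annotation: creating the devstack-image-cache_base directory above is treated as idempotent ("Folder ... created" even if it already existed). A tiny sketch of that create-if-missing step, using the local filesystem as a stand-in for the datastore MakeDirectory call:

    import os

    def create_folder_if_missing(base, name):
        path = os.path.join(base, name)
        os.makedirs(path, exist_ok=True)  # no-op when the folder already exists
        return path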
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.955247] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': session[52e8ecde-2a0b-e4ae-2e65-4d20ee2933c7]52d37510-0998-9981-c30d-53bb5986ab43, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.955620] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.955766] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1224.956059] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d3d12ba-0a1a-4674-8007-d6eaece7a6c3 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.963830] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1224.963830] env[62503]: value = "task-1388337" [ 1224.963830] env[62503]: _type = "Task" [ 1224.963830] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.971525] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388337, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.473814] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388337, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459418} completed successfully. 
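Annotation: because the image's VMDK is already present in the datastore image cache, the root disk is produced by copying it into the instance directory (the CopyVirtualDisk_Task above) rather than fetching the image again. A minimal sketch of that path handling; copy_virtual_disk() is a hypothetical stand-in and the path layout follows the log.

    def prepare_root_disk(copy_virtual_disk, datastore, image_id, instance_uuid):
        cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        instance_path = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        copy_virtual_disk(cache_path, instance_path)
        return instance_path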
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.474140] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8150ca02-f879-471d-8913-459408f127a1/8150ca02-f879-471d-8913-459408f127a1.vmdk to [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk {{(pid=62503) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1225.474367] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Extending root virtual disk to 1048576 {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1225.474617] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdfa8c25-8672-4267-9cca-c1874be19804 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.480245] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1225.480245] env[62503]: value = "task-1388338" [ 1225.480245] env[62503]: _type = "Task" [ 1225.480245] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.487046] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.990272] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066277} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.990649] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Extended root virtual disk {{(pid=62503) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1225.991376] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3ec4e8-17ce-415c-9b0e-41887634af87 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.010014] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.010260] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50e249a2-7591-4239-81de-66d50ba8b595 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.028293] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1226.028293] env[62503]: value = "task-1388339" [ 1226.028293] env[62503]: _type = "Task" [ 1226.028293] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.038846] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.539038] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388339, 'name': ReconfigVM_Task, 'duration_secs': 0.245933} completed successfully. 
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.539038] env[62503]: DEBUG nova.virt.vmwareapi.volumeops [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977/7f314fba-9395-4ecc-8fe9-3676daa65977.vmdk or device None with type sparse {{(pid=62503) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1226.539038] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfe5f769-c430-4b70-82a0-64227da734e0 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.545611] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1226.545611] env[62503]: value = "task-1388340" [ 1226.545611] env[62503]: _type = "Task" [ 1226.545611] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.552755] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388340, 'name': Rename_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.055151] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388340, 'name': Rename_Task, 'duration_secs': 0.122461} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.055504] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powering on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1227.055677] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f31edf65-778e-42eb-94bb-877164d64bca {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.061548] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1227.061548] env[62503]: value = "task-1388341" [ 1227.061548] env[62503]: _type = "Task" [ 1227.061548] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.068599] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388341, 'name': PowerOnVM_Task} progress is 0%. 
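Annotation: taken together, the rebuild recorded in the preceding entries runs as power off, destroy, recreate the VM, copy the cached VMDK, extend it to the flavor's root size, attach it, rename, and power back on. The sketch below records only that sequence; the driver object and its methods are hypothetical, with each call labelled with the vCenter task it corresponds to in the log.

    def rebuild_instance(driver, instance_uuid, image_id, root_gb):
        driver.power_off(instance_uuid)           # PowerOffVM_Task
        driver.destroy(instance_uuid)             # UnregisterVM + DeleteDatastoreFile_Task
        driver.create_vm(instance_uuid)           # CreateVM_Task
        disk = driver.copy_image_from_cache(image_id, instance_uuid)  # CopyVirtualDisk_Task
        driver.extend_virtual_disk(disk, root_gb * 1024 * 1024)       # ExtendVirtualDisk_Task, size in KB (1048576 above)
        driver.attach_disk(instance_uuid, disk)   # ReconfigVM_Task
        driver.rename(instance_uuid)              # Rename_Task
        driver.power_on(instance_uuid)            # PowerOnVM_Task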
{{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.571363] env[62503]: DEBUG oslo_vmware.api [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388341, 'name': PowerOnVM_Task, 'duration_secs': 0.398092} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.571645] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powered on the VM {{(pid=62503) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1227.571856] env[62503]: DEBUG nova.compute.manager [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Checking state {{(pid=62503) _get_power_state /opt/stack/nova/nova/compute/manager.py:1794}} [ 1227.572627] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e7232a-66c8-45d2-b554-ba2b6c951cd2 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.087542] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.087880] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.087973] env[62503]: DEBUG nova.objects.instance [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62503) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1228.289427] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "7f314fba-9395-4ecc-8fe9-3676daa65977" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.289687] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62503) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.289909] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "7f314fba-9395-4ecc-8fe9-3676daa65977-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.290110] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.290287] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.292401] env[62503]: INFO nova.compute.manager [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Terminating instance [ 1228.293998] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "refresh_cache-7f314fba-9395-4ecc-8fe9-3676daa65977" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.294185] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquired lock "refresh_cache-7f314fba-9395-4ecc-8fe9-3676daa65977" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.294363] env[62503]: DEBUG nova.network.neutron [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Building network info cache for instance {{(pid=62503) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1228.813264] env[62503]: DEBUG nova.network.neutron [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance cache missing network info. 
{{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1228.868501] env[62503]: DEBUG nova.network.neutron [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.096588] env[62503]: DEBUG oslo_concurrency.lockutils [None req-8edd0de9-14e5-4395-98ad-a98ba7b41423 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.371711] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Releasing lock "refresh_cache-7f314fba-9395-4ecc-8fe9-3676daa65977" {{(pid=62503) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.372145] env[62503]: DEBUG nova.compute.manager [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Start destroying the instance on the hypervisor. {{(pid=62503) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3156}} [ 1229.372350] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Destroying instance {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1229.373246] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d171340-76d0-4115-bb82-4609f7bbb6fa {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.380878] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powering off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1229.381118] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65cfb64b-c006-4db0-ac29-d27045c765eb {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.386738] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){ [ 1229.386738] env[62503]: value = "task-1388342" [ 1229.386738] env[62503]: _type = "Task" [ 1229.386738] env[62503]: } to complete. 
[ 1229.393787] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1229.896496] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388342, 'name': PowerOffVM_Task, 'duration_secs': 0.18626} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1229.896745] env[62503]: DEBUG nova.virt.vmwareapi.vm_util [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Powered off the VM {{(pid=62503) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1229.896919] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Unregistering the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1229.897182] env[62503]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df18f870-78e7-436b-a8b9-2b65574b5f56 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1229.920655] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Unregistered the VM {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1229.920878] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Deleting contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1229.921087] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Deleting the datastore file [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977 {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1229.921378] env[62503]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97d19821-9f5c-436a-9569-4bc308cb35db {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1229.927741] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for the task: (returnval){
[ 1229.927741] env[62503]: value = "task-1388344"
[ 1229.927741] env[62503]: _type = "Task"
[ 1229.927741] env[62503]: } to complete. {{(pid=62503) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
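Editor's note: the records above trace the destroy ordering on the hypervisor: power off, unregister the VM from the vCenter inventory, then delete the instance directory from the datastore. A sketch of that ordering with hypothetical stand-in helpers; these are not Nova's vmops/vm_util/ds_util functions:

    # Sketch of the destroy sequence; the three callables are placeholders for
    # the corresponding vSphere task invocations seen in the log.
    def destroy_instance(vm, datastore, instance_uuid,
                         power_off_vm, unregister_vm, delete_datastore_dir):
        power_off_vm(vm)        # PowerOffVM_Task, then wait for it to complete
        unregister_vm(vm)       # UnregisterVM removes the VM from the inventory
        # Only after the VM is gone is its directory removed, e.g.
        # [datastore1] 7f314fba-9395-4ecc-8fe9-3676daa65977 in the log above.
        delete_datastore_dir(datastore, instance_uuid)  # DeleteDatastoreFile_Task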
[ 1229.935251] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388344, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1230.437012] env[62503]: DEBUG oslo_vmware.api [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Task: {'id': task-1388344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090754} completed successfully. {{(pid=62503) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1230.437357] env[62503]: DEBUG nova.virt.vmwareapi.ds_util [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Deleted the datastore file {{(pid=62503) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1230.437478] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Deleted contents of the VM from datastore datastore1 {{(pid=62503) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1230.437656] env[62503]: DEBUG nova.virt.vmwareapi.vmops [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance destroyed {{(pid=62503) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1230.437831] env[62503]: INFO nova.compute.manager [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Took 1.07 seconds to destroy the instance on the hypervisor.
[ 1230.438077] env[62503]: DEBUG oslo.service.loopingcall [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62503) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1230.438276] env[62503]: DEBUG nova.compute.manager [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Deallocating network for instance {{(pid=62503) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}}
[ 1230.438369] env[62503]: DEBUG nova.network.neutron [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] deallocate_for_instance() {{(pid=62503) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1230.452748] env[62503]: DEBUG nova.network.neutron [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Instance cache missing network info. {{(pid=62503) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
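Editor's note: the loopingcall record above shows network deallocation being wrapped so that transient Neutron failures are retried a bounded number of times before the termination gives up. A plain-Python sketch of that retry wrapper; it stands in for, and is not, oslo.service.loopingcall itself:

    # Sketch of "wait for function ... to return": retry a callable a few times
    # before propagating the failure.
    import time

    def call_with_retries(func, max_attempts=3, sleep=1.0, retry_on=(Exception,)):
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except retry_on:
                if attempt == max_attempts:
                    raise
                time.sleep(sleep)

    # In the spirit of _deallocate_network_with_retries (names are placeholders):
    # call_with_retries(lambda: deallocate_for_instance(instance_uuid))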
[ 1230.955730] env[62503]: DEBUG nova.network.neutron [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Updating instance_info_cache with network_info: [] {{(pid=62503) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1231.458697] env[62503]: INFO nova.compute.manager [-] [instance: 7f314fba-9395-4ecc-8fe9-3676daa65977] Took 1.02 seconds to deallocate network for instance.
[ 1231.965337] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1231.965624] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1231.965839] env[62503]: DEBUG nova.objects.instance [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lazy-loading 'resources' on Instance uuid 7f314fba-9395-4ecc-8fe9-3676daa65977 {{(pid=62503) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1232.494552] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36b88b4-fed6-4903-bd0d-c7de393b8674 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.503097] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00e5117-2a4b-423d-9f93-8bd9f5f505e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.531301] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091640e2-d78a-4dd3-aa87-9db123f3280f {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.537835] env[62503]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a3e1c4-d3a1-44d2-90a7-ada8b3a7a8e8 {{(pid=62503) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1232.550337] env[62503]: DEBUG nova.compute.provider_tree [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
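Editor's note: the ProviderTree record above carries the host's per-resource-class inventory. A small worked sketch of how schedulable capacity falls out of those numbers, capacity = (total - reserved) * allocation_ratio, bounded per allocation by max_unit; the values are copied from the log record and the helper is illustrative only:

    # Inventory copied from the log record above.
    INVENTORY = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 175,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    def schedulable_capacity(inv):
        # capacity per resource class = (total - reserved) * allocation_ratio
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    # schedulable_capacity(INVENTORY)
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}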
[ 1233.081840] env[62503]: DEBUG nova.scheduler.client.report [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Updated inventory for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}}
[ 1233.081840] env[62503]: DEBUG nova.compute.provider_tree [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Updating resource provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 generation from 134 to 135 during operation: update_inventory {{(pid=62503) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 1233.082125] env[62503]: DEBUG nova.compute.provider_tree [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Updating inventory in ProviderTree for provider 1b064c5b-f1db-4a5b-8a3c-ceff2e240ac2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62503) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1233.586986] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1233.606536] env[62503]: INFO nova.scheduler.client.report [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Deleted allocations for instance 7f314fba-9395-4ecc-8fe9-3676daa65977
[ 1234.113802] env[62503]: DEBUG oslo_concurrency.lockutils [None req-db47b6eb-7c07-45fa-b72f-90e024583c61 tempest-ServerShowV254Test-219217048 tempest-ServerShowV254Test-219217048-project-member] Lock "7f314fba-9395-4ecc-8fe9-3676daa65977" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.824s {{(pid=62503) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
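Editor's note: the final records remove the instance's allocations from Placement and drop the terminate lock. A hedged sketch of the Placement call behind "Deleted allocations for instance", a DELETE against /allocations/{consumer_uuid}; the endpoint and token below are placeholders, and Nova itself goes through its authenticated SchedulerReportClient rather than raw requests:

    # Sketch only: remove a consumer's allocations from the Placement service.
    import requests

    PLACEMENT = "http://placement.example.test"      # placeholder endpoint
    TOKEN = "replace-with-a-real-keystone-token"     # placeholder credential

    def delete_allocations(consumer_uuid):
        resp = requests.delete(
            f"{PLACEMENT}/allocations/{consumer_uuid}",
            headers={"X-Auth-Token": TOKEN,
                     "OpenStack-API-Version": "placement 1.28"},
            timeout=10,
        )
        # Placement answers 204 No Content when the allocations were removed.
        resp.raise_for_status()

    # delete_allocations("7f314fba-9395-4ecc-8fe9-3676daa65977")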